iOS OpenGL ES: Implementing a Split-Screen Video Capture Effect

Without further ado, here is the effect we are going to build:


[Figure: split-screen effect (GIF)]

To achieve this effect, the steps are, in brief:
1. Capture video from the camera.
2. Map each captured frame, as a texture, onto the top and bottom halves of the screen.
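
Note that capture only works once the user has granted camera (and microphone) access; the NSCameraUsageDescription and NSMicrophoneUsageDescription keys must also be present in Info.plist. A minimal sketch of requesting access up front (error handling left out):

// Ask for camera access before starting the session; without authorization
// AVCaptureSession delivers no video frames.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (!granted) {
        NSLog(@"Camera access denied");
    }
}];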

Audio and Video Capture

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "openGLManager.h"

@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>

@property (strong, nonatomic) AVCaptureDeviceInput       *cameraInput;//camera input
@property (strong, nonatomic) AVCaptureDeviceInput       *audioMicInput;//microphone input
@property (strong, nonatomic) AVCaptureSession           *recordSession;//capture session
@property (strong, nonatomic) dispatch_queue_t           captureQueue;//capture queue (dispatch objects are retained, so strong rather than copy)
@property (strong, nonatomic) AVCaptureConnection        *audioConnection;//audio connection
@property (strong, nonatomic) AVCaptureConnection        *videoConnection;//video connection
@property (strong, nonatomic) AVCaptureVideoDataOutput   *videoOutput;//video data output
@property (strong, nonatomic) AVCaptureAudioDataOutput   *audioOutput;//audio data output


@property (atomic, assign) BOOL isCapturing;//recording in progress
@property (atomic, assign) CGFloat currentRecordTime;//current recording time

@property (nonatomic,strong) openGLManager *glManager;


@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
        
    self.glManager = [[openGLManager alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:self.glManager];
    
    [self recordSession];
    [self sessionLayerRunning];
    [self.glManager setupFilter: @"2Screen"];
}


- (void)sessionLayerRunning{
    // -startRunning blocks until the session is up, so keep it off the main thread
    dispatch_async(self.captureQueue, ^{
        if (![self.recordSession isRunning]) {
            [self.recordSession startRunning];
        }
    });
}

- (void)sessionLayerStop{
    
    dispatch_async(self.captureQueue, ^{
        if ([self.recordSession isRunning]) {
            [self.recordSession stopRunning];
        }
    });
}

//called on captureQueue for every captured sample buffer; only video frames are rendered here
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    
    if (connection == self.videoConnection) {
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        [self.glManager renderBuffer:pixelBuffer];
    }
}

//capture session
- (AVCaptureSession *)recordSession {
    if (_recordSession == nil) {
        _recordSession = [[AVCaptureSession alloc] init];
        //add the camera input
        if ([_recordSession canAddInput:self.cameraInput]) {
            [_recordSession addInput:self.cameraInput];
        }
        //add the microphone input
        if ([_recordSession canAddInput:self.audioMicInput]) {
            [_recordSession addInput:self.audioMicInput];
        }
        //add the video data output
        if ([_recordSession canAddOutput:self.videoOutput]) {
            [_recordSession addOutput:self.videoOutput];
        }
        //add the audio data output
        if ([_recordSession canAddOutput:self.audioOutput]) {
            [_recordSession addOutput:self.audioOutput];
        }
        
        //video orientation
        self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
        
        //resolution
        if ([_recordSession canSetSessionPreset:AVCaptureSessionPreset1280x720]){
            _recordSession.sessionPreset = AVCaptureSessionPreset1280x720;
        }
        
        //auto white balance (the device must be locked before changing its configuration)
        if ([self.cameraInput.device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance] &&
            [self.cameraInput.device lockForConfiguration:nil]) {
            [self.cameraInput.device setWhiteBalanceMode:AVCaptureWhiteBalanceModeAutoWhiteBalance];
            [self.cameraInput.device unlockForConfiguration];
        }
        
    }
    return _recordSession;
}

//camera device for the given position
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    //enumerate the default video-capable devices and return the one matching position
    //(note: -devicesWithMediaType: is deprecated since iOS 10;
    // AVCaptureDeviceDiscoverySession is the modern replacement)
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == position) {
            return device;
        }
    }
    return nil;
}

//camera input (front camera)
- (AVCaptureDeviceInput *)cameraInput {
    if (_cameraInput == nil) {
        NSError *error;
        _cameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self cameraWithPosition:AVCaptureDevicePositionFront] error:&error];
    }
    return _cameraInput;
}

//microphone input
- (AVCaptureDeviceInput *)audioMicInput {
    if (_audioMicInput == nil) {
        AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        NSError *error;
        _audioMicInput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:&error];
    }
    return _audioMicInput;
}


//serial queue for capture callbacks
- (dispatch_queue_t)captureQueue {
    if (_captureQueue == nil) {
        _captureQueue = dispatch_queue_create("capture.queue", DISPATCH_QUEUE_SERIAL);
    }
    return _captureQueue;
}

//video data output
//note: the kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange (YUV) format cannot be
//rendered by the single-sampler RGBA shader used here, so request kCVPixelFormatType_32BGRA
- (AVCaptureVideoDataOutput *)videoOutput {
    if (!_videoOutput) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        [_videoOutput setSampleBufferDelegate:self queue:self.captureQueue];
        _videoOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
        _videoOutput.alwaysDiscardsLateVideoFrames = YES;
    }
    return _videoOutput;
}

//audio data output
- (AVCaptureAudioDataOutput *)audioOutput {
    if (_audioOutput == nil) {
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [_audioOutput setSampleBufferDelegate:self queue:self.captureQueue];
    }
    return _audioOutput;
}

//video connection
- (AVCaptureConnection *)videoConnection {
    if (!_videoConnection) {
        _videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    }
    return _videoConnection;
}

//audio connection
- (AVCaptureConnection *)audioConnection {
    if (_audioConnection == nil) {
        _audioConnection = [self.audioOutput connectionWithMediaType:AVMediaTypeAudio];
    }
    return _audioConnection;
}

@end
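
The post never shows openGLManager.h. Judging from how the class is used above (a view added with addSubview:, plus setupFilter: and renderBuffer: calls), its interface presumably looks roughly like the following sketch; treat it as an assumption, not the author's original header:

#import <UIKit/UIKit.h>
#import <CoreVideo/CoreVideo.h>

// Sketch of the implied interface: a UIView subclass backed by a CAEAGLLayer
// that renders incoming camera frames with a named shader pair.
@interface openGLManager : UIView

// Load the vertex/fragment shader pair named filterName (e.g. @"2Screen").
- (void)setupFilter:(NSString *)filterName;

// Render one captured frame.
- (void)renderBuffer:(CVPixelBufferRef)pixelBuffer;

@end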

OpenGL Video Preview

#import "openGLManager.h"
#import <GLKit/GLKit.h>
#import <OpenGLES/ES3/glext.h>

@interface openGLManager (){
    // renderbuffer and framebuffer objects
    GLuint renderBuffer, frameBuffer;
    // backing store dimensions
    GLint backingWidth, backingHeight;

    CVOpenGLESTextureRef texture;

    CVOpenGLESTextureCacheRef textureCache;//texture cache for video frames
    
    // shader slots
    GLuint positionSlot,textureSlot,textureCoordSlot;
    
    GLuint         vertShader, fragShader;
    
    NSMutableArray  *attributes;
    NSMutableArray  *uniforms;

}

@property (nonatomic, strong) EAGLContext *context;

@property (nonatomic, strong) CAEAGLLayer* myLayer;

// start timestamp
@property (nonatomic, assign) NSTimeInterval startTimeInterval;
// shader program
@property (nonatomic, assign) GLuint program;
// vertex buffer
@property (nonatomic, assign) GLuint vertexBuffer;
// texture ID
@property (nonatomic, assign) GLuint textureID;

@property(readwrite, copy, nonatomic) NSString *vertexShaderLog;
@property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;

@end

@implementation openGLManager

+ (Class)layerClass {
    return [CAEAGLLayer class];
}

- (instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        [self initFilter];
    }
    return self;
}

- (void)initFilter{
    [self setupContext];
    [self setupLayer];
    [self setupCoreVideoTextureCache];
    [self loadShaders:@"Normal"];
    [self bindRender];
}

- (void)setupFilter:(NSString*)filterName{
    [self loadShaders:filterName];
}

- (void)renderBuffer:(CVPixelBufferRef)pixelBuffer {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }
    
    glUseProgram(self.program);

    [self setDisplayFramebuffer];
    
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    
    [self cleanUpTexture];
    
    glActiveTexture(GL_TEXTURE4);
    // Create a CVOpenGLESTexture from the CVImageBuffer
    size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer);
    CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                textureCache,
                                                                pixelBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,
                                                                (GLsizei)frameWidth,
                                                                (GLsizei)frameHeight,
                                                                GL_BGRA,
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &texture);
    if (ret) {
        NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage ret: %d", ret);
    }
    glBindTexture(CVOpenGLESTextureGetTarget(texture), CVOpenGLESTextureGetName(texture));
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glUniform1i(textureSlot, 4);


    static const GLfloat imageVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f,  1.0f,
        1.0f,  1.0f,
    };
    
    static const GLfloat noRotationTextureCoordinates[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
    
    glVertexAttribPointer(positionSlot, 2, GL_FLOAT, 0, 0, imageVertices);
    glVertexAttribPointer(textureCoordSlot, 2, GL_FLOAT, 0, 0, noRotationTextureCoordinates);
    
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    
    [self presentFramebuffer];
        
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    
}

- (void)destroyDisplayFramebuffer {
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }
    
    if (frameBuffer) {
        glDeleteFramebuffers(1, &frameBuffer);
        frameBuffer = 0;
    }
    
    if (renderBuffer) {
        glDeleteRenderbuffers(1, &renderBuffer);
        renderBuffer = 0;
    }
}

- (void)cleanUpTexture {
    if(texture) {
        CFRelease(texture);
        texture = NULL;
    }
    CVOpenGLESTextureCacheFlush(textureCache, 0);
}

- (void)setDisplayFramebuffer {
    if (!frameBuffer) {
        [self bindRender];
    }
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
    glViewport(0, 0, backingWidth, backingHeight);
}

- (void)presentFramebuffer {
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
    
    [self.context presentRenderbuffer:GL_RENDERBUFFER];
}

//rendering context
- (void)setupContext{
    self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
    if (!self.context) {
        self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    }
    
    [EAGLContext setCurrentContext:self.context];
}
//set up the layer (CAEAGLLayer)
- (void)setupLayer{
    
    self.contentScaleFactor = [[UIScreen mainScreen] scale];
    self.myLayer = (CAEAGLLayer *)self.layer;
    self.myLayer.opaque = YES; //CALayer is transparent by default, and transparency is costly, so turn it off
    self.myLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                   // no retained backing: contents are not preserved after presentation
                                   @(NO),kEAGLDrawablePropertyRetainedBacking,
                                   kEAGLColorFormatRGBA8,kEAGLDrawablePropertyColorFormat,
                                   nil];
        
}
//efficient texture cache for rendering video frames
- (void)setupCoreVideoTextureCache
{
    CVReturn result = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, self.context, NULL, &textureCache);
    if (result != kCVReturnSuccess) {
        NSLog(@"CVOpenGLESTextureCacheCreate fail %d",result);
    }
    
}
//create and bind the render/frame buffers
- (void)bindRender{
    
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }
    
    glGenFramebuffers(1, &frameBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
    
    glGenRenderbuffers(1, &renderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
    
    [self.context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self.myLayer];
    
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
    
    if ( (backingWidth == 0) || (backingHeight == 0) ) {
        NSLog(@"Backing width: 0 || height: 0");

        [self destroyDisplayFramebuffer];
        return;
    }
    
    NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight);
    
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBuffer);
    
}

//load and link the shader pair with the given base filename
- (void)loadShaders:(NSString*)shaderFilename{
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }
    
    
    self.program = glCreateProgram();
    attributes = [[NSMutableArray alloc] init];
    uniforms = [[NSMutableArray alloc] init];
    
    NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:shaderFilename ofType:@"vsh"];
    NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil];

    NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:shaderFilename ofType:@"fsh"];
    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];

    if (![self compileShader:&vertShader type:GL_VERTEX_SHADER string:vertexShaderString]) {
        NSLog(@"Failed to compile vertex shader");
    }
    
    // Create and compile fragment shader
    if (![self compileShader:&fragShader  type:GL_FRAGMENT_SHADER string:fragmentShaderString]) {
        NSLog(@"Failed to compile fragment shader");
    }
    
    glAttachShader(self.program, vertShader);
    glAttachShader(self.program, fragShader);
    
    [self addAttribute:@"Position"];
    [self addAttribute:@"textureCoordinate"];
    
    if (![self link]) {
        NSString *fragLog = [self fragmentShaderLog];
        NSLog(@"Fragment shader compile log: %@", fragLog);
        NSString *vertLog = [self vertexShaderLog];
        NSLog(@"Vertex shader compile log: %@", vertLog);
        NSAssert(NO, @"Filter shader link failed");
    }
    
    positionSlot = [self attributeIndex:@"Position"];
    textureCoordSlot = [self attributeIndex:@"textureCoordinate"];
    textureSlot = [self uniformIndex:@"Texture"]; // assumes the fragment shader names its sampler uniform "Texture"
    
    glUseProgram(self.program);

    glEnableVertexAttribArray(positionSlot);
    glEnableVertexAttribArray(textureCoordSlot);
}

- (GLuint)attributeIndex:(NSString *)attributeName {
    return (GLuint)[attributes indexOfObject:attributeName];
}
- (GLuint)uniformIndex:(NSString *)uniformName {
    return glGetUniformLocation(self.program, [uniformName UTF8String]);
}

- (BOOL)link {

    GLint status;
    glLinkProgram(self.program);
    glGetProgramiv(self.program, GL_LINK_STATUS, &status);
    if (status == GL_FALSE)
        return NO;
    
    if (vertShader) {
        glDeleteShader(vertShader);
        vertShader = 0;
    }
    if (fragShader) {
        glDeleteShader(fragShader);
        fragShader = 0;
    }
    return YES;
}

- (void)addAttribute:(NSString *)attributeName {
    if (![attributes containsObject:attributeName]) {
        [attributes addObject:attributeName];
        glBindAttribLocation(self.program, (GLuint)[attributes indexOfObject:attributeName], [attributeName UTF8String]);
    }
}

- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type string:(NSString *)shaderString {
//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();

    GLint status;
    const GLchar *source;
    
    source = (GLchar *)[shaderString UTF8String];
    if (!source) {
        NSLog(@"Failed to load shader source");
        return NO;
    }
    
    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &source, NULL);
    glCompileShader(*shader);
    
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);

    if (status != GL_TRUE) {
        GLint logLength;
        glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
        if (logLength > 0) {
            GLchar *log = (GLchar *)malloc(logLength);
            glGetShaderInfoLog(*shader, logLength, &logLength, log);
            if (shader == &vertShader) {
                self.vertexShaderLog = [NSString stringWithFormat:@"%s", log];
            } else {
                self.fragmentShaderLog = [NSString stringWithFormat:@"%s", log];
            }

            free(log);
        }
    }
    return status == GL_TRUE;
}

@end
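
One thing the listing omits is teardown. A possible sketch, assuming cleanup belongs in dealloc (this is not in the original post):

// Free GL resources and the CoreVideo texture cache when the view goes away.
- (void)dealloc {
    [self destroyDisplayFramebuffer];
    [self cleanUpTexture];
    if (textureCache) {
        CFRelease(textureCache);
        textureCache = NULL;
    }
    if (_program) {
        glDeleteProgram(_program);
        _program = 0;
    }
}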

The OpenGL rendering flow above is essentially boilerplate; the interesting part is the programmable-pipeline (GLSL) code.

[Figure: programmable rendering pipeline]

Vertex Shader

The vertex shader is loaded from the file "2Screen.vsh". Note that the attribute names must match the ones bound in loadShaders: ("Position" and "textureCoordinate"):

attribute vec4 Position;
attribute vec2 textureCoordinate;
varying vec2 varyTextureCoord;

void main() {
    gl_Position = Position;
    varyTextureCoord = textureCoordinate;
}

Fragment Shader

The fragment shader is loaded from the file "2Screen.fsh":

precision highp float;
uniform sampler2D Texture;
varying highp vec2 varyTextureCoord;

void main() {
    vec2 uv = varyTextureCoord.xy;
    float y;
    if (uv.y >= 0.0 && uv.y <= 0.5) {
        y = uv.y + 0.25;
    }else {
        y = uv.y - 0.25;
    }
    gl_FragColor = texture2D(Texture, vec2(uv.x, y));
}

In fact, the heart of the split-screen effect is just the fragment shader above: the middle band of the source image (y in the range 0.25–0.75) is mapped to both the top and the bottom half of the screen. For example, a fragment at uv.y = 0.1 samples source row y = 0.35, and the corresponding fragment in the other half, at uv.y = 0.6, samples the same row y = 0.35.
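
The branch can also be folded away. A small equivalent sketch of the same mapping (identical everywhere except the single row at exactly uv.y = 0.5):

// fract(uv.y * 2.0) folds both screen halves onto [0, 1); scaling by 0.5 and
// offsetting by 0.25 then selects the 0.25-0.75 band of the source image.
float y = fract(uv.y * 2.0) * 0.5 + 0.25;
gl_FragColor = texture2D(Texture, vec2(uv.x, y));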

Exercise

We have implemented a 2-row, 1-column equal split. You should now be able to generalize it to an m-row, n-column equal split (m and n being integers >= 1). Think it through and give it a try; one possible sketch follows.
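
One way to generalize the same center-crop idea (a sketch under assumptions: m and n are hypothetical compile-time constants here, and each cell shows the centered 1/n-by-1/m patch of the source, just as the 2-screen shader shows its centered band):

precision highp float;
uniform sampler2D Texture;
varying highp vec2 varyTextureCoord;

// Hypothetical grid size: m rows by n columns.
const float m = 3.0;
const float n = 2.0;

void main() {
    vec2 grid = vec2(n, m);
    // position within the current cell, scaled down to a (1/n x 1/m) patch
    vec2 local = fract(varyTextureCoord * grid) / grid;
    // center the sampled patch in the source image
    vec2 offset = (1.0 - 1.0 / grid) * 0.5;
    gl_FragColor = texture2D(Texture, local + offset);
}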


最后編輯于
?著作權歸作者所有,轉載或內容合作請聯系作者
平臺聲明:文章內容(如有圖片或視頻亦包括在內)由作者上傳并發布,文章內容僅代表作者本人觀點,簡書系信息發布平臺,僅提供信息存儲服務。
  • 序言:七十年代末,一起剝皮案震驚了整個濱河市,隨后出現的幾起案子,更是在濱河造成了極大的恐慌,老刑警劉巖,帶你破解...
    沈念sama閱讀 228,443評論 6 532
  • 序言:濱河連續發生了三起死亡事件,死亡現場離奇詭異,居然都是意外死亡,警方通過查閱死者的電腦和手機,發現死者居然都...
    沈念sama閱讀 98,530評論 3 416
  • 文/潘曉璐 我一進店門,熙熙樓的掌柜王于貴愁眉苦臉地迎上來,“玉大人,你說我怎么就攤上這事。” “怎么了?”我有些...
    開封第一講書人閱讀 176,407評論 0 375
  • 文/不壞的土叔 我叫張陵,是天一觀的道長。 經常有香客問我,道長,這世上最難降的妖魔是什么? 我笑而不...
    開封第一講書人閱讀 62,981評論 1 312
  • 正文 為了忘掉前任,我火速辦了婚禮,結果婚禮上,老公的妹妹穿的比我還像新娘。我一直安慰自己,他們只是感情好,可當我...
    茶點故事閱讀 71,759評論 6 410
  • 文/花漫 我一把揭開白布。 她就那樣靜靜地躺著,像睡著了一般。 火紅的嫁衣襯著肌膚如雪。 梳的紋絲不亂的頭發上,一...
    開封第一講書人閱讀 55,204評論 1 324
  • 那天,我揣著相機與錄音,去河邊找鬼。 笑死,一個胖子當著我的面吹牛,可吹牛的內容都是我干的。 我是一名探鬼主播,決...
    沈念sama閱讀 43,263評論 3 441
  • 文/蒼蘭香墨 我猛地睜開眼,長吁一口氣:“原來是場噩夢啊……” “哼!你這毒婦竟也來了?” 一聲冷哼從身側響起,我...
    開封第一講書人閱讀 42,415評論 0 288
  • 序言:老撾萬榮一對情侶失蹤,失蹤者是張志新(化名)和其女友劉穎,沒想到半個月后,有當地人在樹林里發現了一具尸體,經...
    沈念sama閱讀 48,955評論 1 336
  • 正文 獨居荒郊野嶺守林人離奇死亡,尸身上長有42處帶血的膿包…… 初始之章·張勛 以下內容為張勛視角 年9月15日...
    茶點故事閱讀 40,782評論 3 354
  • 正文 我和宋清朗相戀三年,在試婚紗的時候發現自己被綠了。 大學時的朋友給我發了我未婚夫和他白月光在一起吃飯的照片。...
    茶點故事閱讀 42,983評論 1 369
  • 序言:一個原本活蹦亂跳的男人離奇死亡,死狀恐怖,靈堂內的尸體忽然破棺而出,到底是詐尸還是另有隱情,我是刑警寧澤,帶...
    沈念sama閱讀 38,528評論 5 359
  • 正文 年R本政府宣布,位于F島的核電站,受9級特大地震影響,放射性物質發生泄漏。R本人自食惡果不足惜,卻給世界環境...
    茶點故事閱讀 44,222評論 3 347
  • 文/蒙蒙 一、第九天 我趴在偏房一處隱蔽的房頂上張望。 院中可真熱鬧,春花似錦、人聲如沸。這莊子的主人今日做“春日...
    開封第一講書人閱讀 34,650評論 0 26
  • 文/蒼蘭香墨 我抬頭看了看天上的太陽。三九已至,卻和暖如春,著一層夾襖步出監牢的瞬間,已是汗流浹背。 一陣腳步聲響...
    開封第一講書人閱讀 35,892評論 1 286
  • 我被黑心中介騙來泰國打工, 沒想到剛下飛機就差點兒被人妖公主榨干…… 1. 我叫王不留,地道東北人。 一個月前我還...
    沈念sama閱讀 51,675評論 3 392
  • 正文 我出身青樓,卻偏偏與公主長得像,于是被迫代替她去往敵國和親。 傳聞我的和親對象是個殘疾皇子,可洞房花燭夜當晚...
    茶點故事閱讀 47,967評論 2 374

推薦閱讀更多精彩內容