天天看點

ALAsset和ALAssetRepresentation詳解

前言

ALAsset類代表相冊中的每個資源檔案,可以通過它擷取資源檔案的相關資訊還能修改和建立資源檔案,ALAssetRepresentation類代表相冊中每個資源檔案的詳細資訊,可以通過它擷取資源的大小,名字,路徑等詳細資訊。

根據URL擷取ALAsset

怎樣根據url擷取ALAsset對象

url類似于(

assets-library://asset/asset.PNG?id=2EAEEF99-2C75-4484-B922-9A2F34507537&ext=PNG

)

注意這裡有個坑:在 iOS 8.1 上,透過 assetForURL: 取得「照片串流(Photo Stream)」中的資源時會回傳 nil

解決方法如下

定義Block

typedef void(^ZJGetALAssetBlock)(ALAsset *);           

複制

擷取方法

/// Resolves an assets-library:// URL to its ALAsset asynchronously.
/// @param url   The asset URL (as obtained from ALAssetPropertyAssetURL).
/// @param block Invoked with the matched asset, or nil if it cannot be found.
///
/// On iOS 8.1, -assetForURL: returns nil for assets that live in Photo
/// Streams, so on a nil result we fall back to enumerating the Photo Stream
/// group and matching by URL.
+ (void)getALAssetByNSURL:(NSURL *)url callback:(ZJGetALAssetBlock)block{
    ALAssetsLibrary *lib = [DNAsset defaultAssetsLibrary];
    [lib assetForURL:url resultBlock:^(ALAsset *asset){
        if (asset) {
            block(asset);
            return;
        }
        // iOS 8.1 workaround: search the Photo Stream group manually.
        __block BOOL found = NO;
        [lib enumerateGroupsWithTypes:ALAssetsGroupPhotoStream
                           usingBlock:^(ALAssetsGroup *group, BOOL *stop)
         {
             if (group == nil) {
                 // Enumeration finished; report failure if nothing matched,
                 // so the caller always hears back exactly once.
                 if (!found) block(nil);
                 return;
             }
             [group enumerateAssetsWithOptions:NSEnumerationReverse
                                    usingBlock:^(ALAsset *result, NSUInteger index, BOOL *innerStop) {
                                        if (result && [[result valueForProperty:ALAssetPropertyAssetURL] isEqual:url])
                                        {
                                            found = YES;
                                            // Bug fix: the original called block(asset),
                                            // but asset is nil in this branch — the
                                            // matched asset is `result`.
                                            block(result);
                                            *innerStop = YES;
                                        }
                                    }];
             // Also stop the outer group enumeration once a match is found.
             if (found) *stop = YES;
         }
                         failureBlock:^(NSError *error)
         {
             block(nil);
         }];
    } failureBlock:^(NSError *error){
        block(nil);
    }];
}           

複制

屬性擷取

// Obtain the detailed representation of the asset
ALAssetRepresentation* representation = [asset defaultRepresentation];
// Pixel dimensions (width x height) of the asset
CGSize dimension = [representation dimensions];
// Full-resolution image of the asset
[representation fullResolutionImage];
// Full-screen-sized image of the asset
[representation fullScreenImage];
// File name of the asset
NSString* filename = [representation filename];
NSLog(@"filename:%@",filename);
// Scale factor of the image
[representation scale];
// Size of the asset data, in bytes
[representation size];
// Metadata dictionary of the asset
[representation metadata];
// Orientation of the image
[representation orientation];
// Asset URL — identical to the value ALAsset reports for ALAssetPropertyAssetURL
NSURL* url = [representation url];
NSLog(@"url:%@",url);
// UTI (uniform type identifier) of the asset
NSLog(@"uti:%@",[representation UTI]);           

複制

判斷選擇的是圖檔還是視訊

// Representation carrying the detailed info for this asset
let representation =  alasset.defaultRepresentation()
// Asset type (photo or video)
let alassetType = alasset.valueForProperty(ALAssetPropertyType) as! String;
// File name
let fileName = representation.filename();
// File size in bytes
let fileSize = representation.size();
// Creation date
let fileDate = alasset.valueForProperty(ALAssetPropertyDate) as! NSDate;
if(alassetType == ALAssetTypePhoto){
    // the selected asset is a photo
}else if(alassetType == ALAssetTypeVideo){
    // the selected asset is a video
}           

複制

檔案上傳

ALAsset的url沒法用于上傳,雖然可以讀取NSData,但是這樣就把資料都加載在記憶體中,如果是一個較大的視訊檔案,顯然是不太合理的,是以我們可以把檔案自己儲存到臨時的位置,再進行上傳

定義宏

// Caches-directory path used to stage full-size photo files for upload
#define ZJImageCachesPath [[NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:@"images"]

// Caches-directory path used to stage video files for upload
#define ZJVideoCachesPath [[NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:@"video"]           

複制

定義Block

typedef void(^ALAssetToNSURLBlock)(NSURL *);           

複制

方法

// 将原始圖檔轉化為NSData資料,寫入沙盒
+ (void)getImageUrlWithALAsset:(ALAsset *)asset callback:(ALAssetToNSURLBlock) block
{
    // 建立存放原始圖的檔案夾--->OriginalPhotoImages
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSFileManager * fileManager = [NSFileManager defaultManager];
        if (![fileManager fileExistsAtPath:ZJImageCachesPath]) {
            [fileManager createDirectoryAtPath:ZJImageCachesPath withIntermediateDirectories:YES attributes:nil error:nil];
        }
        
        ALAssetRepresentation *rep = [asset defaultRepresentation];
        Byte *buffer = (Byte*)malloc((unsigned long)rep.size);
        NSUInteger buffered = [rep getBytes:buffer fromOffset:0.0 length:((unsigned long)rep.size) error:nil];
        NSData *data = [NSData dataWithBytesNoCopy:buffer length:buffered freeWhenDone:YES];
        NSString * imagePath = [ZJImageCachesPath stringByAppendingPathComponent:rep.filename];
        //删除原有的臨時檔案
        if ([fileManager fileExistsAtPath:imagePath]) {
            [fileManager removeItemAtPath:imagePath error:nil];
        }
        [data writeToFile:imagePath atomically:YES];
        block([NSURL fileURLWithPath:imagePath]);
    });
}

// 将原始視訊轉化為NSData資料,寫入沙盒
+ (void)getVideoUrlWithALAsset:(ALAsset *)asset callback:(ALAssetToNSURLBlock) block
{
    // 解析一下,為什麼視訊不像圖檔一樣一次性開辟本身大小的記憶體寫入?
    // 想想,如果1個視訊有1G多,難道直接開辟1G多的空間大小來寫?
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSFileManager * fileManager = [NSFileManager defaultManager];
        if (![fileManager fileExistsAtPath:ZJVideoCachesPath]) {
            [fileManager createDirectoryAtPath:ZJVideoCachesPath withIntermediateDirectories:YES attributes:nil error:nil];
        }
        
        ALAssetRepresentation *rep = [asset defaultRepresentation];
        NSString * videoPath = [ZJVideoCachesPath stringByAppendingPathComponent:rep.filename];
        
        //删除原有的臨時檔案
        if ([fileManager fileExistsAtPath:videoPath]) {
            [fileManager removeItemAtPath:videoPath error:nil];
        }
        char const *cvideoPath = [videoPath UTF8String];
        FILE *file = fopen(cvideoPath, "ab+");
        if (file) {
            const int bufferSize = 1024 * 1024;
            // 初始化一個1M的buffer
            Byte *buffer = (Byte*)malloc(bufferSize);
            NSUInteger read = 0, offset = 0, written = 0;
            NSError* err = nil;
            if (rep.size != 0)
            {
                do {
                    read = [rep getBytes:buffer fromOffset:offset length:bufferSize error:&err];
                    written = fwrite(buffer, sizeof(char), read, file);
                    offset += read;
                } while (read != 0 && !err);//沒到結尾,沒出錯,ok繼續
            }
            // 釋放緩沖區,關閉檔案
            free(buffer);
            buffer = NULL;
            fclose(file);
            file = NULL;
        }
        block([NSURL fileURLWithPath:videoPath]);
    });
}           

複制

用Alamofire上傳

上傳時附帶其他參數(multipart)

但是這種方式沒法得到上傳進度

// Upload a file together with extra form parameters (multipart/form-data).
// Note: this multipart API variant does not expose upload progress.
static func uploadImage(url:String,parameters:[String:AnyObject],imagePath:NSURL,fileParName:String){
Alamofire.upload(
    .POST,
    url,
    multipartFormData: { multipartFormData in
        multipartFormData.appendBodyPart(fileURL: imagePath, name: fileParName)
        // Attach each extra parameter as its own form part
        for (key, value) in parameters {
            if(value.stringValue != nil){
                multipartFormData.appendBodyPart(data: value.stringValue.dataUsingEncoding(NSUTF8StringEncoding)!, name: key);
            }
        }
    },
    encodingCompletion: { encodingResult in
        switch encodingResult {
        case .Success(let upload, _, _):
            upload.responseJSON { response in
                debugPrint(response)
            }
        case .Failure(let encodingError):
            print(encodingError)
        }
    }
)
} // Bug fix: the original snippet was missing this closing brace for the function.

複制

可以擷取上傳進度的方式 但是沒法附帶其他參數

// Upload with progress reporting (no extra multipart parameters possible here).
Alamofire.upload(.POST, "https://httpbin.org/post", file: imagePath)
    .progress { bytesWritten, totalBytesWritten, totalBytesExpectedToWrite in
        // Progress callbacks arrive off the main queue; hop to main before touching UI.
        dispatch_async(dispatch_get_main_queue()) {
            print("Total bytes written on main queue: \(totalBytesWritten)")
        }
    }
    .validate()
    .responseJSON { response in
        debugPrint(response)
}           

複制

是以例如設定使用者頭像等就用第一種方式

要是做檔案上傳就必須用第二種方式 第二種方式也能控制暫停、繼續、停止等操作