Commit 686bef4f authored by Jennie Shi's avatar Jennie Shi

Plugin update

parent 1f98a72e
......@@ -3063,8 +3063,10 @@
}
}
},
"cordova-plugin-hand-idcardplugin": {
"version": "file:../../plugins/cordova-plugin-hand-idcardplugin",
"cordova-plugin-inappbrowser": {
"version": "2.0.2",
"resolved": "https://registry.npm.taobao.org/cordova-plugin-inappbrowser/download/cordova-plugin-inappbrowser-2.0.2.tgz",
"integrity": "sha1-pEj8vpdMprQpiL00VRCdkj1sl2k=",
"dev": true
},
"core-js": {
......
......@@ -34,9 +34,6 @@
"cordova-plugin-file-transfer": {
"PACKAGE_NAME": "com.xcmg.app"
},
"cordova-plugin-hrms-faceidentify": {
"PACKAGE_NAME": "com.xcmg.app"
},
"cordova-plugin-image-picker": {
"PACKAGE_NAME": "com.xcmg.app"
},
......
### Face Recognition Plugin
## Plugin Methods
> The plugin currently exposes two methods, faceDetect and getLocalImage, which perform face detection and fetch a local image respectively.
### faceDetect
faceDetect(successCallback, errorCallback)
* successCallback callback invoked on success; it receives the detection result as a JSON object.

Sample detection result (a JSON object):

{
  "age": 32,              // estimated age
  "beauty": 72,           // attractiveness score
  "expression": 14,       // expression score
  "face_id": 1831653390825881599, // face id
  "gender": 99,           // gender, from 0 (female) to 100 (male); the closer to 100, the more likely male
  "glass": 1,             // whether glasses are worn
  "height": 232,          // height of the face rectangle
  "imgPath": "/private/var/mobile/Containers/Data/Application/89F513B4-B1DD-497E-8DEE-09162B469C6F/tmp/1481783049.894455", // local path of the captured face image
  "pitch": -3,            // vertical rotation, range [-30, 30]
  "roll": -2,             // in-plane rotation, range [-180, 180]
  "width": 232,           // width of the face rectangle
  "x": 104,               // x of the face rectangle's top-left corner
  "y": 218,               // y of the face rectangle's top-left corner
  "yaw": 2                // horizontal rotation, range [-30, 30]
}

* errorCallback callback invoked on failure (no face detected).
### getLocalImage
getLocalImage(successCallback, errorCallback, arg)
* successCallback callback invoked on success; it receives the image as a base64 string.
* arg the local image path returned by faceDetect.
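
Below is a minimal JavaScript usage sketch of both methods via cordova.exec. The action names ("faceDetect", "getLocalImage") and the optional direction argument come from FacePlugin.java in this commit; the service name "FacePlugin" is an assumption, since the plugin.xml registration is not shown here.

```js
document.addEventListener('deviceready', function () {
    cordova.exec(
        function (result) {
            // result is the detection JSON shown above
            console.log('face detected, age = ' + result.age);
            // fetch the captured image as a base64 string
            cordova.exec(
                function (base64) { console.log('image length: ' + base64.length); },
                function (err) { console.log('getLocalImage failed: ' + err); },
                'FacePlugin', 'getLocalImage', [result.imgPath]); // service name assumed
        },
        function (err) { console.log('no face detected: ' + err); },
        'FacePlugin', 'faceDetect', [{ direction: 'front' }]);    // 'back' selects the rear camera on Android
}, false);
```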
package com.hand.face.utils;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
/**
* Created by xiang.wang on 2016/12/9.
* Accelerometer helper that tracks the device's rotation.
*/
public class Accelerometer {
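// Decompiled code with obfuscated names:
// a = SensorManager, b = whether the listener is currently registered,
// c = last detected clockwise rotation of the device, d = the accelerometer listener.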
private SensorManager a = null;
private boolean b = false;
private static CLOCKWISE_ANGLE c;
private SensorEventListener d = new SensorEventListener() {
public void onAccuracyChanged(Sensor var1, int var2) {
}
public void onSensorChanged(SensorEvent var1) {
if(var1.sensor.getType() == 1) {
float var2 = var1.values[0];
float var3 = var1.values[1];
float var4 = var1.values[2];
if(Math.abs(var2) > 3.0F || Math.abs(var3) > 3.0F) {
if(Math.abs(var2) > Math.abs(var3)) {
if(var2 > 0.0F) {
Accelerometer.c = CLOCKWISE_ANGLE.Deg0;
} else {
Accelerometer.c = CLOCKWISE_ANGLE.Deg180;
}
} else if(var3 > 0.0F) {
Accelerometer.c = CLOCKWISE_ANGLE.Deg90;
} else {
Accelerometer.c = CLOCKWISE_ANGLE.Deg270;
}
}
}
}
};
public Accelerometer(Context var1) {
this.a = (SensorManager)var1.getSystemService("sensor");
c = CLOCKWISE_ANGLE.Deg0;
}
public void start() {
if(!this.b) {
this.b = true;
c = CLOCKWISE_ANGLE.Deg0;
this.a.registerListener(this.d, this.a.getDefaultSensor(1), 3);
}
}
public void stop() {
if(this.b) {
this.b = false;
this.a.unregisterListener(this.d);
}
}
public static int getDirection() {
return c.getValue();
}
public static enum CLOCKWISE_ANGLE {
Deg0(0),
Deg90(1),
Deg180(2),
Deg270(3);
private int value;
private CLOCKWISE_ANGLE(int var3) {
this.value = var3;
}
public int getValue() {
return this.value;
}
}
}
package com.youtu.sign;
public class Base64Util {
private static final char last2byte = (char) Integer
.parseInt("00000011", 2);
private static final char last4byte = (char) Integer
.parseInt("00001111", 2);
private static final char last6byte = (char) Integer
.parseInt("00111111", 2);
private static final char lead6byte = (char) Integer
.parseInt("11111100", 2);
private static final char lead4byte = (char) Integer
.parseInt("11110000", 2);
private static final char lead2byte = (char) Integer
.parseInt("11000000", 2);
private static final char[] encodeTable = new char[] { 'A', 'B', 'C', 'D',
'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q',
'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd',
'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',
'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3',
'4', '5', '6', '7', '8', '9', '+', '/' };
/**
* Base64 encoding.
*
* @param from
* The src data.
* @return cryto_str
*/
public static String encode(byte[] from) {
StringBuilder to = new StringBuilder((int) (from.length * 1.34) + 3);
int num = 0;
char currentByte = 0;
for (int i = 0; i < from.length; i++) {
num = num % 8;
while (num < 8) {
switch (num) {
case 0:
currentByte = (char) (from[i] & lead6byte);
currentByte = (char) (currentByte >>> 2);
break;
case 2:
currentByte = (char) (from[i] & last6byte);
break;
case 4:
currentByte = (char) (from[i] & last4byte);
currentByte = (char) (currentByte << 2);
if ((i + 1) < from.length) {
currentByte |= (from[i + 1] & lead2byte) >>> 6;
}
break;
case 6:
currentByte = (char) (from[i] & last2byte);
currentByte = (char) (currentByte << 4);
if ((i + 1) < from.length) {
currentByte |= (from[i + 1] & lead4byte) >>> 4;
}
break;
}
to.append(encodeTable[currentByte]);
num += 6;
}
}
if (to.length() % 4 != 0) {
for (int i = 4 - to.length() % 4; i > 0; i--) {
to.append("=");
}
}
return to.toString();
}
}
package com.hand.face.utils;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* Created by USER on 2016/12/10.
*/
public class CamParaUtil {
private static final String TAG = "CamParaUtil";
private CameraSizeComparator sizeComparator = new CameraSizeComparator();
private static CamParaUtil myCamPara = null;
private CamParaUtil(){
}
public static CamParaUtil getInstance(){
if(myCamPara == null){
myCamPara = new CamParaUtil();
return myCamPara;
}
else{
return myCamPara;
}
}
public Size getPropPreviewSize(List<Size> list, float th, int minWidth){
Collections.sort(list, sizeComparator);
int i = 0;
for(Size s:list){
// if((s.width >= minWidth) && equalRate(s, th)){
if((s.width >= minWidth)){
Log.i(TAG, "PreviewSize:w = " + s.width + "h = " + s.height);
break;
}
i++;
}
if(i == list.size()){
i = 0; // if no suitable size was found, fall back to the smallest one
}
return list.get(i);
}
public Size getPropPictureSize(List<Size> list, float th, int minWidth){
Collections.sort(list, sizeComparator);
int i = 0;
for(Size s:list){
// if((s.width >= minWidth) && equalRate(s, th)){
if((s.width >= minWidth)){
Log.i(TAG, "PictureSize : w = " + s.width + "h = " + s.height);
break;
}
i++;
}
if(i == list.size()){
i = 0; // if no suitable size was found, fall back to the smallest one
}
return list.get(i);
}
public boolean equalRate(Size s, float rate){
float r = (float)(s.width)/(float)(s.height);
if(Math.abs(r - rate) <= 0.4)
{
return true;
}
else{
return false;
}
}
public class CameraSizeComparator implements Comparator<Size> {
public int compare(Size lhs, Size rhs) {
// TODO Auto-generated method stub
if(lhs.width == rhs.width){
return 0;
}
else if(lhs.width > rhs.width){
return 1;
}
else{
return -1;
}
}
}
/** Print the supported preview sizes
* @param params
*/
public void printSupportPreviewSize(Camera.Parameters params){
List<Size> previewSizes = params.getSupportedPreviewSizes();
for(int i=0; i< previewSizes.size(); i++){
Size size = previewSizes.get(i);
Log.i(TAG, "previewSizes:width = " + size.width + " height = " + size.height);
}
}
/** Print the supported picture sizes
* @param params
*/
public void printSupportPictureSize(Camera.Parameters params){
List<Size> pictureSizes = params.getSupportedPictureSizes();
for(int i=0; i< pictureSizes.size(); i++){
Size size = pictureSizes.get(i);
Log.i(TAG, "pictureSizes:width = " + size.width
+ " height = " + size.height);
}
}
/** Print the supported focus modes
* @param params
*/
public void printSupportFocusMode(Camera.Parameters params){
List<String> focusModes = params.getSupportedFocusModes();
for(String mode : focusModes){
Log.i(TAG, "focusModes--" + mode);
}
}
}
package com.hand.face.myinterface;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import com.hand.face.ui.FaceCompareActivity;
import com.hand.face.utils.CamParaUtil;
import com.hand.face.utils.FileUtil;
import com.hand.face.utils.ImageUtil;
import java.io.IOException;
import java.util.List;
/**
* Created by USER on 2016/12/10.
*/
public class CameraInterface {
private static final String TAG = "CameraInterface";
private Camera mCamera;
private Camera.Parameters mParams;
private boolean isPreviewing = false;
private float mPreviwRate = -1f;
private int mCameraId = -1;
private boolean isGoolgeFaceDetectOn = false;
private static CameraInterface mCameraInterface;
private FaceCompareActivity.NV21ImgCallBack imgCallBack;
public interface CamOpenOverCallback{
public void cameraHasOpened();
}
private CameraInterface(){
}
public static synchronized CameraInterface getInstance(){
if(mCameraInterface == null){
mCameraInterface = new CameraInterface();
}
return mCameraInterface;
}
public void setCallBack(FaceCompareActivity.NV21ImgCallBack callBack){
imgCallBack = callBack;
}
/** Open the camera
* @param callback
*/
public void doOpenCamera(CamOpenOverCallback callback, int cameraId){
Log.i(TAG, "Camera open....");
try {
mCamera = Camera.open(cameraId);
mCameraId = cameraId;
if(callback != null){
callback.cameraHasOpened();
}
}catch (Exception e){
}
}
/** Start the preview
* @param holder
* @param previewRate
*/
public void doStartPreview(SurfaceHolder holder, float previewRate){
Log.i(TAG, "doStartPreview...");
if(isPreviewing){
mCamera.stopPreview();
return;
}
if(mCamera != null){
mParams = mCamera.getParameters();
mParams.setPictureFormat(PixelFormat.JPEG); // image format used for captured pictures
CamParaUtil.getInstance().printSupportPictureSize(mParams);
CamParaUtil.getInstance().printSupportPreviewSize(mParams);
// choose the preview size and the picture size
Size pictureSize = CamParaUtil.getInstance().getPropPictureSize(
mParams.getSupportedPictureSizes(),previewRate, 800);
mParams.setPictureSize(pictureSize.width, pictureSize.height);
Size previewSize = CamParaUtil.getInstance().getPropPreviewSize(
mParams.getSupportedPreviewSizes(), previewRate, 800);
mParams.setPreviewSize(previewSize.width, previewSize.height);
// use the default preview frame format, NV21
mParams.setPreviewFormat(ImageFormat.NV21);
// the camera defaults to landscape, so rotate the display by 90 degrees
mCamera.setDisplayOrientation(90);
CamParaUtil.getInstance().printSupportFocusMode(mParams);
List<String> focusModes = mParams.getSupportedFocusModes();
if(focusModes.contains("continuous-video")){
mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
// receive preview frames in real time
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if(imgCallBack!=null && data!=null){
imgCallBack.getImgBuffer(data);
}
}
});
mCamera.setParameters(mParams);
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview(); // start the preview
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
isPreviewing = true;
mPreviwRate = previewRate;
mParams = mCamera.getParameters(); // re-read the parameters that were actually applied
Log.i(TAG, "Applied PreviewSize: width = " + mParams.getPreviewSize().width
+ " height = " + mParams.getPreviewSize().height);
Log.i(TAG, "Applied PictureSize: width = " + mParams.getPictureSize().width
+ " height = " + mParams.getPictureSize().height);
}
}
/**
* Stop the preview and release the camera.
*/
public void doStopCamera(){
if(null != mCamera)
{
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
isPreviewing = false;
mPreviwRate = -1f;
mCamera.release();
mCamera = null;
}
}
/**
* Take a picture.
*/
public void doTakePicture(){
if(isPreviewing && (mCamera != null)){
mCamera.takePicture(mShutterCallback, null, mJpegPictureCallback);
}
}
/** Get the current Camera.Parameters
* @return
*/
public Camera.Parameters getCameraParams(){
if(mCamera != null){
mParams = mCamera.getParameters();
return mParams;
}
return null;
}
public Camera getmCamera(){
if(mCamera != null){
return mCamera;
}
return null;
}
/** Get the Camera instance
* @return
*/
public Camera getCameraDevice(){
return mCamera;
}
public int getCameraId(){
return mCameraId;
}
/* The three callbacks below handle the shutter sound and saving the captured picture. */
ShutterCallback mShutterCallback = new ShutterCallback()
// Called when the shutter fires; a custom shutter sound could be played here. The default "click" is used.
{
public void onShutter() {
// TODO Auto-generated method stub
Log.i(TAG, "myShutterCallback:onShutter...");
}
};
PictureCallback mRawCallback = new PictureCallback()
// Callback for the uncompressed raw capture data; may be null.
{
public void onPictureTaken(byte[] data, Camera camera) {
// TODO Auto-generated method stub
Log.i(TAG, "myRawCallback:onPictureTaken...");
}
};
PictureCallback mJpegPictureCallback = new PictureCallback()
// Callback for the JPEG image data; this is the most important one.
{
public void onPictureTaken(byte[] data, Camera camera) {
// TODO Auto-generated method stub
Log.i(TAG, "myJpegCallback:onPictureTaken...");
Bitmap b = null;
if(null != data){
b = BitmapFactory.decodeByteArray(data, 0, data.length); // decode the JPEG byte array into a bitmap
mCamera.stopPreview();
isPreviewing = false;
}
// save the picture to the SD card
if(null != b)
{
//With FOCUS_MODE_CONTINUOUS_VIDEO set, myParam.set("rotation", 90) no longer takes effect,
//so the bitmap has to be rotated manually here.
Bitmap rotaBitmap = ImageUtil.getRotateBitmap(b, 90.0f);
// FileUtil.saveBitmap(rotaBitmap);
}
// resume the preview
mCamera.startPreview();
isPreviewing = true;
}
};
}
package com.hand.face.view;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.hand.face.myinterface.CameraInterface;
import com.hand.face.ui.FaceCompareActivity;
import java.io.ByteArrayOutputStream;
/**
* Created by USER on 2016/12/10.
*/
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback,FaceCompareActivity.NV21ImgCallBack {
private static final String TAG = "CameraSurfaceView";
CameraInterface mCameraInterface;
Context mContext;
SurfaceHolder mSurfaceHolder;
// /** Current screen rotation angle */
// private int mOrientation=0;
private byte[] buffer;
private String contextName;
private int orientation = -90;
private int cameraId = 0;
public CameraSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
// TODO Auto-generated constructor stub
mContext = context;
mSurfaceHolder = getHolder();
mSurfaceHolder.setFormat(PixelFormat.TRANSPARENT); // TRANSLUCENT = semi-transparent, TRANSPARENT = fully transparent
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mSurfaceHolder.addCallback(this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
Log.i(TAG, "surfaceCreated...");
CameraInterface.getInstance().setCallBack(this);
contextName = mContext.getClass().getName();
if("com.hand.face.ui.FaceCompareActivity".equals(contextName)){
CameraInterface.getInstance().doOpenCamera(null, CameraInfo.CAMERA_FACING_FRONT);
cameraId = CameraInfo.CAMERA_FACING_FRONT;
}else if("com.hand.face.ui.FaceSerchActivity".equals(contextName)){
CameraInterface.getInstance().doOpenCamera(null, CameraInfo.CAMERA_FACING_BACK);
cameraId = CameraInfo.CAMERA_FACING_BACK;
}else{
CameraInterface.getInstance().doOpenCamera(null, CameraInfo.CAMERA_FACING_FRONT);
cameraId = CameraInfo.CAMERA_FACING_FRONT;
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
Log.i(TAG, "surfaceChanged...");
CameraInterface.getInstance().doStartPreview(mSurfaceHolder, 1.333f);
// updateCameraOrientation();
if(CameraInterface.getInstance()!=null && CameraInterface.getInstance().getCameraParams()!=null){
if("com.hand.face.ui.FaceCompareActivity".equals(contextName)){
setCameraDisplayOrientation((Activity)mContext,cameraId,CameraInterface.getInstance().getmCamera());
}else if("com.hand.face.ui.FaceSerchActivity".equals(contextName)){
setCameraDisplayOrientation((Activity)mContext,cameraId,CameraInterface.getInstance().getmCamera());
}
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
Log.i(TAG, "surfaceDestroyed...");
CameraInterface.getInstance().doStopCamera();
}
public SurfaceHolder getSurfaceHolder(){
return mSurfaceHolder;
}
public Bitmap getPicture(){
Bitmap bmp = null;
// convert the NV21 buffer into a bitmap
if(buffer!=null){
YuvImage image = new YuvImage(buffer, ImageFormat.NV21, CameraInterface.getInstance().getCameraParams().getPreviewSize().width, CameraInterface.getInstance().getCameraParams().getPreviewSize().height, null); //ImageFormat.NV21 640 480
ByteArrayOutputStream outputSteam = new ByteArrayOutputStream();
image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 85, outputSteam); // compress the NV21 frame to JPEG at quality 85
byte[] jpegData = outputSteam.toByteArray(); // get the JPEG bytes from the output stream
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 1;
bmp = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, options);
if(CameraInfo.CAMERA_FACING_FRONT == CameraInterface.getInstance().getCameraId()){
bmp = rotaingImageView(-orientation, bmp); // rotate the image (front camera)
}else{
bmp = rotaingImageView(orientation, bmp); // rotate the image (back camera)
}
}
return bmp;
}
private Bitmap rotaingImageView(int angle , Bitmap bitmap) {
// rotate the bitmap
Matrix matrix = new Matrix();
matrix.postRotate(angle);
System.out.println("angle2=" + angle);
// create the rotated bitmap
Bitmap resizedBitmap = Bitmap.createBitmap(bitmap, 0, 0,
bitmap.getWidth(), bitmap.getHeight(), matrix, true);
return resizedBitmap;
}
/**
* Adjust the saved picture's rotation according to the current device orientation.
*/
// private void updateCameraOrientation(){
// if(CameraInterface.getInstance()!=null && CameraInterface.getInstance().getCameraParams()!=null){
// Camera.Parameters parameters = CameraInterface.getInstance().getCameraParams();
// //rotation can be 0, 90, 180 or 270; 0 means horizontal.
// int rotation=90+mOrientation==360?0:90+mOrientation;
// //the front camera needs the vertical direction flipped, otherwise the picture is upside down
// if(CameraInfo.CAMERA_FACING_FRONT == CameraInterface.getInstance().getCameraId()){
// if(rotation==90) rotation=270;
// else if (rotation==270) rotation=90;
// }
// parameters.setRotation(rotation);//rotate the generated picture by 90 degrees
// //rotate the preview by 90 degrees
// CameraInterface.getInstance().getmCamera().setDisplayOrientation(90);//rotate the preview by 90 degrees
// CameraInterface.getInstance().getmCamera().setParameters(parameters);
// //start listening for screen orientation changes
// startOrientationChangeListener();
// }
// }
/**
* Start the orientation-change listener, used to adjust the saved picture's orientation when the screen rotates.
*/
// private void startOrientationChangeListener() {
// OrientationEventListener mOrEventListener = new OrientationEventListener(mContext) {
// @Override
// public void onOrientationChanged(int rotation) {
//
// if (((rotation >= 0) && (rotation <= 45)) || (rotation > 315)){
// rotation=0;
// }else if ((rotation > 45) && (rotation <= 135)) {
// rotation=90;
// }
// else if ((rotation > 135) && (rotation <= 225)) {
// rotation=180;
// }
// else if((rotation > 225) && (rotation <= 315)) {
// rotation=270;
// }else {
// rotation=0;
// }
// if(rotation==mOrientation)
// return;
// mOrientation=rotation;
// updateCameraOrientation();
// }
// };
// mOrEventListener.enable();
// }
@Override
public void getImgBuffer(byte[] nv21) {
if (null == nv21) {
return;
}
if(buffer == null){
buffer = new byte[nv21.length];
}
synchronized (nv21) {
System.arraycopy(nv21, 0, buffer, 0, nv21.length);
}
}
public int getDisplayRotation(Activity activity) {
int rotation = activity.getWindowManager().getDefaultDisplay()
.getRotation();
switch (rotation) {
case Surface.ROTATION_0: return 0;
case Surface.ROTATION_90: return 90;
case Surface.ROTATION_180: return 180;
case Surface.ROTATION_270: return 270;
}
return 0;
}
public void setCameraDisplayOrientation(Activity activity,
int cameraId, Camera camera) {
// See android.hardware.Camera.setCameraDisplayOrientation for
// documentation.
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int degrees = getDisplayRotation(activity);
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
orientation = result;
camera.setDisplayOrientation(result);
}
}
package com.hand.face.common;
/**
* Created by qingliang on 16/10/31.
*/
public class Config {
public static final String APP_ID = "";
public static final String SECRET_ID = "";
public static final String SECRET_KEY = "";
}
package com.hand.face.utils;
import android.content.Context;
import android.graphics.Point;
import android.util.DisplayMetrics;
import android.util.Log;
/**
* Created by USER on 2016/12/10.
*/
public class DisplayUtil {
private static final String TAG = "DisplayUtil";
/**
* dip转px
* @param context
* @param dipValue
* @return
*/
public static int dip2px(Context context, float dipValue){
final float scale = context.getResources().getDisplayMetrics().density;
return (int)(dipValue * scale + 0.5f);
}
/**
* px转dip
* @param context
* @param pxValue
* @return
*/
public static int px2dip(Context context, float pxValue){
final float scale = context.getResources().getDisplayMetrics().density;
return (int)(pxValue / scale + 0.5f);
}
/**
* 获取屏幕宽度和高度,单位为px
* @param context
* @return
*/
public static Point getScreenMetrics(Context context){
DisplayMetrics dm =context.getResources().getDisplayMetrics();
int w_screen = dm.widthPixels;
int h_screen = dm.heightPixels;
Log.i(TAG, "Screen---Width = " + w_screen + " Height = " + h_screen + " densityDpi = " + dm.densityDpi);
return new Point(w_screen, h_screen);
}
/**
* 获取屏幕长宽比
* @param context
* @return
*/
public static float getScreenRate(Context context){
Point P = getScreenMetrics(context);
float H = P.y;
float W = P.x;
return (H/W);
}
}
package com.hand.face.utils;
/**
* Created by USER on 2016/12/10.
*/
public class EventUtil {
public static final int UPDATE_FACE_RECT = 0;
public static final int CAMERA_HAS_STARTED_PREVIEW = 1;
}
package com.hand.hrms.face;
import android.widget.Toast;
import com.hand.face.myinterface.NotifyMessage;
import com.hand.face.ui.FaceCompareActivity;
import com.hand.face.ui.FaceSerchActivity;
import com.hand.face.utils.NotifyMessageManager;
import com.hand.face.utils.Utils;
import com.youtu.sign.Base64Util;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Created by USER on 2016/12/13.
*/
public class FacePlugin extends CordovaPlugin implements NotifyMessage {
private NotifyMessageManager notify;
private CallbackContext mCallbackContext;
// initialize the plugin
@Override
public void initialize(CordovaInterface cordova, CordovaWebView webView) {
super.initialize(cordova, webView);
notify = NotifyMessageManager.getInstance();
notify.setNotifyMessage(this);
}
@Override
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
mCallbackContext = callbackContext;
if("faceDetect".equals(action)){
if(args!=null && args.length()>0){
JSONObject obj = args.getJSONObject(0);
if(obj!=null){
if(obj.has("direction")){
String value = obj.getString("direction");
if(value!=null && !value.isEmpty()){
if("front".equals(value)){
FaceCompareActivity.actionStart(cordova.getActivity());
}else if("back".equals(value)){
FaceSerchActivity.actionStart(cordova.getActivity());
}else{
FaceCompareActivity.actionStart(cordova.getActivity());
}
}else{
FaceCompareActivity.actionStart(cordova.getActivity());
}
}else{
FaceCompareActivity.actionStart(cordova.getActivity());
}
}else{
FaceCompareActivity.actionStart(cordova.getActivity());
}
}else{
FaceCompareActivity.actionStart(cordova.getActivity());
}
return true;
}else if("getLocalImage".equals(action)){
String imgPath = args.getString(0);
byte[] bytes = Utils.File2byte(imgPath);
String path = Base64Util.encode(bytes);
mCallbackContext.success(path);
return true;
}
return super.execute(action, args, callbackContext);
}
@Override
public void sendMessage(String msg) {
try {
JSONObject msgObj = new JSONObject(msg);
mCallbackContext.success(msgObj);
} catch (JSONException e) {
e.printStackTrace();
}
}
}
package com.hand.face.utils;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
public class FaceUtil {
public final static int REQUEST_PICTURE_CHOOSE = 1;
public final static int REQUEST_CAMERA_IMAGE = 2;
public final static int REQUEST_CROP_IMAGE = 3;
/***
* Crop a picture.
* @param activity Activity
* @param uri Uri of the picture
*/
public static void cropPicture(Activity activity, Uri uri) {
Intent innerIntent = new Intent("com.android.camera.action.CROP");
innerIntent.setDataAndType(uri, "image/*");
innerIntent.putExtra("crop", "true");// 才能出剪辑的小方框,不然没有剪辑功能,只能选取图片
innerIntent.putExtra("aspectX", 1); // 放大缩小比例的X
innerIntent.putExtra("aspectY", 1);// 放大缩小比例的X 这里的比例为: 1:1
innerIntent.putExtra("outputX", 320); //这个是限制输出图片大小
innerIntent.putExtra("outputY", 320);
innerIntent.putExtra("return-data", true);
// 切图大小不足输出,无黑框
innerIntent.putExtra("scale", true);
innerIntent.putExtra("scaleUpIfNeeded", true);
File imageFile = new File(getImagePath(activity.getApplicationContext()));
innerIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(imageFile));
innerIntent.putExtra("outputFormat", Bitmap.CompressFormat.JPEG.toString());
activity.startActivityForResult(innerIntent, REQUEST_CROP_IMAGE);
}
/**
* Path where the cropped picture is saved.
* @return
*/
public static String getImagePath(Context context){
String path;
if(!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
path = context.getFilesDir().getAbsolutePath();
} else {
path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/msc/";
}
if(!path.endsWith("/")) {
path += "/";
}
File folder = new File(path);
if (folder != null && !folder.exists()) {
folder.mkdirs();
}
path += "ifd.jpg";
return path;
}
/**
* Read the rotation angle from the image's EXIF attributes.
*
* @param path absolute path of the image
* @return degree rotation angle
*/
public static int readPictureDegree(String path) {
int degree = 0;
try {
ExifInterface exifInterface = new ExifInterface(path);
int orientation = exifInterface.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
degree = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
degree = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
degree = 270;
break;
}
} catch (IOException e) {
e.printStackTrace();
}
return degree;
}
/**
* Rotate a bitmap.
*
* @param angle rotation angle
* @param bitmap source bitmap
* @return bitmap the rotated bitmap
*/
public static Bitmap rotateImage(int angle, Bitmap bitmap) {
// rotation matrix
Matrix matrix = new Matrix();
matrix.postRotate(angle);
// create the rotated bitmap
Bitmap resizedBitmap = Bitmap.createBitmap(bitmap, 0, 0,
bitmap.getWidth(), bitmap.getHeight(), matrix, true);
return resizedBitmap;
}
/**
* Save a Bitmap to local storage.
*/
public static void saveBitmapToFile(Context context,Bitmap bmp){
String file_path = getImagePath(context);
File file = new File(file_path);
FileOutputStream fOut;
try {
fOut = new FileOutputStream(file);
bmp.compress(Bitmap.CompressFormat.JPEG, 85, fOut);
fOut.flush();
fOut.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
package com.hand.face.view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Face;
import android.util.AttributeSet;
import android.widget.ImageView;
import com.hand.face.myinterface.CameraInterface;
import com.hand.face.utils.Utils;
/**
* Created by xiang.wang on 2016/12/10.
*/
public class FaceView extends ImageView {
private static final String TAG = "FaceView";
private Context mContext;
private Paint mLinePaint;
private Face[] mFaces;
private Matrix mMatrix = new Matrix();
private RectF mRect = new RectF();
private Drawable mFaceIndicator = null;
public FaceView(Context context, AttributeSet attrs) {
super(context, attrs);
// TODO Auto-generated constructor stub
initPaint();
mContext = context;
mFaceIndicator = getResources().getDrawable(Utils.getResourceId(mContext, "ic_face_find_2", "drawable"));
}
public void setFaces(Face[] faces){
this.mFaces = faces;
invalidate();
}
public void clearFaces(){
mFaces = null;
invalidate();
}
@Override
protected void onDraw(Canvas canvas) {
// TODO Auto-generated method stub
if(mFaces == null || mFaces.length < 1){
return;
}
boolean isMirror = false;
int Id = CameraInterface.getInstance().getCameraId();
if(Id == CameraInfo.CAMERA_FACING_BACK){
isMirror = false; // the back camera does not need mirroring
}else if(Id == CameraInfo.CAMERA_FACING_FRONT){
isMirror = true; // the front camera needs mirroring
}
Utils.prepareMatrix(mMatrix, isMirror, 90, getWidth(), getHeight());
canvas.save();
mMatrix.postRotate(0); // Matrix.postRotate rotates clockwise by default
canvas.rotate(-0); // Canvas.rotate() rotates counter-clockwise by default
for(int i = 0; i< mFaces.length; i++){
mRect.set(mFaces[i].rect);
mMatrix.mapRect(mRect);
mFaceIndicator.setBounds(Math.round(mRect.left), Math.round(mRect.top),
Math.round(mRect.right), Math.round(mRect.bottom));
mFaceIndicator.draw(canvas);
// canvas.drawRect(mRect, mLinePaint);
}
canvas.restore();
super.onDraw(canvas);
}
private void initPaint(){
mLinePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
// int color = Color.rgb(0, 150, 255);
int color = Color.rgb(98, 212, 68);
// mLinePaint.setColor(Color.RED);
mLinePaint.setColor(color);
mLinePaint.setStyle(Style.STROKE);
mLinePaint.setStrokeWidth(5f);
mLinePaint.setAlpha(180);
}
}
package com.hand.face.utils;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.os.Environment;
import android.util.Log;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
/**
* Created by USER on 2016/12/10.
*/
public class FileUtil {
private static final String TAG = "FileUtil";
private static final File parentPath = Environment.getExternalStorageDirectory();
private static String storagePath = "";
private static final String DST_FOLDER_NAME = ".HandImage";
private static String jpegPath = "";
/** Initialize the save path
* @return
*/
private static String initPath(String appPath){
if(storagePath.equals("")){
storagePath = appPath+"/" + DST_FOLDER_NAME;
File f = new File(storagePath);
if(!f.exists()){
f.mkdir();
}
}
return storagePath;
}
/** Save a Bitmap to the SD card
* @param b
*/
public static void saveBitmap(Bitmap b,String appPath){
// scale width and height down proportionally
Matrix matrix = new Matrix();
matrix.setScale(0.7f, 0.7f);
b = Bitmap.createBitmap(b, 0, 0, b.getWidth(), b.getHeight(), matrix, true);
String path = initPath(appPath);
long dataTake = System.currentTimeMillis();
// String dataTake = "myFace";
String jpegName = path + "/" + dataTake +".jpg";
jpegPath = jpegName;
Log.i(TAG, "saveBitmap:jpegName = " + jpegName);
try {
FileOutputStream fout = new FileOutputStream(jpegName);
BufferedOutputStream bos = new BufferedOutputStream(fout);
b.compress(Bitmap.CompressFormat.JPEG, 70, bos);
bos.flush();
bos.close();
Log.i(TAG, "saveBitmap成功");
} catch (IOException e) {
// TODO Auto-generated catch block
Log.i(TAG, "saveBitmap:失败");
e.printStackTrace();
}
}
public static String getSavePath(){
return jpegPath;
}
// Crop the centered square of a bitmap, given the desired edge length
public static Bitmap centerSquareScaleBitmap(Bitmap bitmap, int edgeLength)
{
if(null == bitmap || edgeLength <= 0)
{
return null;
}
Bitmap result = bitmap;
int widthOrg = bitmap.getWidth();
int heightOrg = bitmap.getHeight();
if(widthOrg > edgeLength && heightOrg > edgeLength)
{
// scale down so that the shorter side equals edgeLength
int longerEdge = (int)(edgeLength * Math.max(widthOrg, heightOrg) / Math.min(widthOrg, heightOrg));
int scaledWidth = widthOrg > heightOrg ? longerEdge : edgeLength;
int scaledHeight = widthOrg > heightOrg ? edgeLength : longerEdge;
Bitmap scaledBitmap;
try{
scaledBitmap = Bitmap.createScaledBitmap(bitmap, scaledWidth, scaledHeight, true);
}
catch(Exception e){
return null;
}
// crop the centered square from the scaled bitmap
int xTopLeft = (scaledWidth - edgeLength) / 2;
int yTopLeft = (scaledHeight - edgeLength) / 2;
try{
result = Bitmap.createBitmap(scaledBitmap, xTopLeft, yTopLeft, edgeLength, edgeLength);
scaledBitmap.recycle();
}
catch(Exception e){
return null;
}
}
return result;
}
}
package com.hand.face.utils;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Face;
import android.hardware.Camera.FaceDetectionListener;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
/**
* Created by USER on 2016/12/10.
*/
public class GoogleFaceDetect implements FaceDetectionListener {
private static final String TAG = "YanZi";
private Context mContext;
private Handler mHander;
public GoogleFaceDetect(Context c, Handler handler){
mContext = c;
mHander = handler;
}
@Override
public void onFaceDetection(Face[] faces, Camera camera) {
// TODO Auto-generated method stub
Log.i(TAG, "onFaceDetection...");
if(faces != null){
Message m = mHander.obtainMessage();
m.what = EventUtil.UPDATE_FACE_RECT;
m.obj = faces;
m.sendToTarget();
}
}
/* private Rect getPropUIFaceRect(Rect r){
Log.i(TAG, "人脸检测 = " + r.flattenToString());
Matrix m = new Matrix();
boolean mirror = false;
m.setScale(mirror ? -1 : 1, 1);
Point p = DisplayUtil.getScreenMetrics(mContext);
int uiWidth = p.x;
int uiHeight = p.y;
m.postScale(uiWidth/2000f, uiHeight/2000f);
int leftNew = (r.left + 1000)*uiWidth/2000;
int topNew = (r.top + 1000)*uiHeight/2000;
int rightNew = (r.right + 1000)*uiWidth/2000;
int bottomNew = (r.bottom + 1000)*uiHeight/2000;
return new Rect(leftNew, topNew, rightNew, bottomNew);
}*/
}
package com.youtu.sign;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
public class HMACSHA1 {
private static final String HMAC_SHA1 = "HmacSHA1";
public static byte[] getSignature(String data, String key) throws Exception {
Mac mac = Mac.getInstance(HMAC_SHA1);
SecretKeySpec signingKey = new SecretKeySpec(key.getBytes(),
mac.getAlgorithm());
mac.init(signingKey);
return mac.doFinal(data.getBytes());
}
}
package com.youtu;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
public class HttpsUtil {
private static class TrustAnyTrustManager implements X509TrustManager {
public void checkClientTrusted(X509Certificate[] chain, String authType)
throws CertificateException {
}
public void checkServerTrusted(X509Certificate[] chain, String authType)
throws CertificateException {
}
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[] {};
}
}
private static class TrustAnyHostnameVerifier implements HostnameVerifier {
public boolean verify(String hostname, SSLSession session) {
return true;
}
}
/**
* Send a POST request to the server over HTTPS.
*
* @param url
* request URL
* @param content
* request body
* @param charset
* character encoding
* @return
* @throws NoSuchAlgorithmException
* @throws KeyManagementException
* @throws IOException
*/
public static byte[] post(String url, String content, String charset)
throws NoSuchAlgorithmException, KeyManagementException,
IOException {
SSLContext sc = SSLContext.getInstance("SSL");
sc.init(null, new TrustManager[] { new TrustAnyTrustManager() },
new java.security.SecureRandom());
URL console = new URL(url);
HttpsURLConnection conn = (HttpsURLConnection) console.openConnection();
conn.setSSLSocketFactory(sc.getSocketFactory());
conn.setHostnameVerifier(new TrustAnyHostnameVerifier());
conn.setDoOutput(true);
conn.connect();
DataOutputStream out = new DataOutputStream(conn.getOutputStream());
out.write(content.getBytes(charset));
// flush and close
out.flush();
out.close();
InputStream is = conn.getInputStream();
if (is != null) {
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
byte[] buffer = new byte[1024];
int len = 0;
while ((len = is.read(buffer)) != -1) {
outStream.write(buffer, 0, len);
}
is.close();
return outStream.toByteArray();
}
return null;
}
}
package com.hand.face.utils;
import android.graphics.Bitmap;
import android.graphics.Matrix;
/**
* Created by USER on 2016/12/10.
*/
public class ImageUtil {
/**
* Rotate a bitmap.
*/
public static Bitmap getRotateBitmap(Bitmap b, float rotateDegree){
Matrix matrix = new Matrix();
matrix.postRotate((float)rotateDegree);
Bitmap rotaBitmap = Bitmap.createBitmap(b, 0, 0, b.getWidth(), b.getHeight(), matrix, false);
return rotaBitmap;
}
}
package com.hand.face.common;
import android.app.Dialog;
import android.content.Context;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.hand.face.utils.Utils;
public class LoadingDialog extends Dialog {
private LinearLayout parentLayout;
private TextView loadingText;
public LoadingDialog(Context context) {
super(context, Utils.getResourceId(context, "loading_dialog", "style"));
init(context);
}
public LoadingDialog(Context context, int theme) {
super(context, Utils.getResourceId(context, "loading_dialog", "style"));
init(context);
}
private void init(Context context){
this.setContentView(LayoutInflater.from(context).inflate(Utils.getResourceId(context, "loading_dialog", "layout"), null));
parentLayout = (LinearLayout) findViewById(Utils.getResourceId(context, "dialog_view", "id"));
loadingText = (TextView) findViewById(Utils.getResourceId(context, "text", "id"));
this.setCancelable(false);
}
// set the text and the background color of the outer container
public void setText(String text, int color){
if(!TextUtils.isEmpty(text)) {
parentLayout.setBackgroundColor(color);
loadingText.setText(text);
loadingText.setVisibility(View.VISIBLE);
}else {
loadingText.setVisibility(View.GONE);
}
}
public void setText(String text){
if(!TextUtils.isEmpty(text)) {
loadingText.setText(text);
loadingText.setVisibility(View.VISIBLE);
}else {
loadingText.setVisibility(View.GONE);
}
}
}
package com.youtu.sign;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
public class MD5 {
public static String stringToMD5(String str) {
try {
byte[] strTemp = str.getBytes();
MessageDigest mdTemp = MessageDigest.getInstance("MD5");
mdTemp.update(strTemp);
return toHexString(mdTemp.digest());
} catch (Exception e) {
return null;
}
}
public static String fileNameToMD5(String fileName) {
InputStream inputStream = null;
try {
inputStream = new FileInputStream(fileName);
return streamToMD5(inputStream);
} catch (Exception e) {
return null;
} finally {
if (inputStream != null) {
try {
inputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
public static String streamToMD5(InputStream inputStream) {
try {
MessageDigest mdTemp = MessageDigest.getInstance("MD5");
byte[] buffer = new byte[1024];
int numRead = 0;
while ((numRead = inputStream.read(buffer)) > 0) {
mdTemp.update(buffer, 0, numRead);
}
return toHexString(mdTemp.digest());
} catch (Exception e) {
return null;
}
}
private static String toHexString(byte[] md) {
char hexDigits[] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'a', 'b', 'c', 'd', 'e', 'f' };
int j = md.length;
char str[] = new char[j * 2];
for (int i = 0; i < j; i++) {
byte byte0 = md[i];
str[2 * i] = hexDigits[byte0 >>> 4 & 0xf];
str[i * 2 + 1] = hexDigits[byte0 & 0xf];
}
return new String(str);
}
}
package com.hand.face.view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.widget.ImageView;
import com.hand.face.utils.DisplayUtil;
/**
* Created by USER on 2016/12/13.
*/
public class MaskView extends ImageView {
int widthScreen, heightScreen;
private RectF shelterR;
private Paint mPaint;
public MaskView(Context context) {
super(context);
}
public MaskView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
public MaskView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context);
}
private void init(Context context){
Point p = DisplayUtil.getScreenMetrics(context);
widthScreen = p.x;
heightScreen = p.y;
mPaint = new Paint();
mPaint.setStyle(Paint.Style.FILL);
mPaint.setStrokeWidth(2);
mPaint.setAntiAlias(true);
mPaint.setColor(Color.WHITE);
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (shelterR == null || shelterR.isEmpty()) {
shelterR = new RectF(0, 0,widthScreen, heightScreen);
}
// draw the circular foreground mask layer
int sc = canvas.saveLayer(shelterR, null, Canvas.MATRIX_SAVE_FLAG
| Canvas.CLIP_SAVE_FLAG | Canvas.HAS_ALPHA_LAYER_SAVE_FLAG
| Canvas.FULL_COLOR_LAYER_SAVE_FLAG
| Canvas.CLIP_TO_LAYER_SAVE_FLAG | Canvas.ALL_SAVE_FLAG);
mPaint.setAlpha(125);
canvas.drawRect(shelterR, mPaint);
mPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_OUT));
mPaint.setColor(Color.WHITE);
canvas.drawCircle(getWidth() / 2, getHeight() / 2, widthScreen*9/20, mPaint);
canvas.restoreToCount(sc);
mPaint.setXfermode(null);
mPaint.setColor(Color.WHITE);
}
}
package com.hand.face.myinterface;
/**
* Created by xiangwang on 2016/12/14.
*/
public interface NotifyMessage {
public void sendMessage(String msg);
}
package com.hand.face.utils;
import com.hand.face.myinterface.NotifyMessage;
/**
* Created by USER on 2016/12/14.
*/
public class NotifyMessageManager {
private static NotifyMessageManager notify;
private NotifyMessage listener;
private NotifyMessageManager(){
}
public static synchronized NotifyMessageManager getInstance() {
if (notify == null) {
notify = new NotifyMessageManager();
}
return notify;
}
public void setNotifyMessage(NotifyMessage nm){
listener = nm;
}
public void sendNotifyMessage(String msg){
listener.sendMessage(msg);
}
}
package com.hand.face.utils;
import android.content.Context;
import android.graphics.Matrix;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
* Created by USER on 2016/12/8.
*/
public class Utils {
private static long lastClickTime;
public static boolean isFastDoubleClick() {
long time = System.currentTimeMillis();
long timeD = time - lastClickTime;
if ( 0 < timeD && timeD < 800) {
return true;
}
lastClickTime = time;
return false;
}
public static void prepareMatrix(Matrix matrix, boolean mirror, int displayOrientation,
int viewWidth, int viewHeight) {
// Need mirror for front camera.
matrix.setScale(mirror ? -1 : 1, 1);
// This is the value for android.hardware.Camera.setDisplayOrientation.
matrix.postRotate(displayOrientation);
// Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
// UI coordinates range from (0, 0) to (width, height).
matrix.postScale(viewWidth / 2000f, viewHeight / 2000f);
matrix.postTranslate(viewWidth / 2f, viewHeight / 2f);
}
public static int getResourceId(Context context, String name, String type) {
try {
String packageName = context.getPackageName();
return context.getResources().getIdentifier(name, type, packageName);
} catch (Exception e) {
e.printStackTrace();
}
return 0;
}
public static byte[] File2byte(String filePath)
{
byte[] buffer = null;
try
{
File file = new File(filePath);
FileInputStream fis = new FileInputStream(file);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
byte[] b = new byte[1024];
int n;
while ((n = fis.read(b)) != -1)
{
bos.write(b, 0, n);
}
fis.close();
bos.close();
buffer = bos.toByteArray();
}
catch (FileNotFoundException e)
{
e.printStackTrace();
}
catch (IOException e)
{
e.printStackTrace();
}
return buffer;
}
}
package com.youtu.sign;
import java.util.Random;
public class YoutuSign {
/**
*app_sign: time-limited signature
*@param appId business ID applied for at http://open.youtu.qq.com/
*@param secret_id secret id applied for at http://open.youtu.qq.com/
*@param secret_key secret key applied for at http://open.youtu.qq.com/
*@param expired signature expiry time
*@param userid business account id; may be left empty if you have none
*@param mySign the generated signature
*@return 0 on success
*/
public static int appSign(String appId, String secret_id, String secret_key,
long expired, String userid, StringBuffer mySign) {
return appSignBase(appId, secret_id, secret_key, expired, "3041722595", null, mySign);
}
private static int appSignBase(String appId, String secret_id,
String secret_key, long expired, String userid, String url,
StringBuffer mySign) {
if (empty(secret_id) || empty(secret_key))
{
return -1;
}
String puserid = "";
if (!empty(userid))
{
if (userid.length() > 64)
{
return -2;
}
puserid = userid;
}
long now = System.currentTimeMillis() / 1000;
int rdm = Math.abs(new Random().nextInt());
String plain_text = "a=" + appId + "&k=" + secret_id + "&e=" + expired + "&t=" + now + "&r=" + rdm + "&u=" + puserid ;//+ "&f=" + fileid.toString();
byte[] bin = hashHmac(plain_text, secret_key);
byte[] all = new byte[bin.length + plain_text.getBytes().length];
System.arraycopy(bin, 0, all, 0, bin.length);
System.arraycopy(plain_text.getBytes(), 0, all, bin.length, plain_text.getBytes().length);
mySign.append(Base64Util.encode(all));
return 0;
}
private static byte[] hashHmac(String plain_text, String accessKey) {
try {
return HMACSHA1.getSignature(plain_text, accessKey);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
public static boolean empty(String s){
return s == null || s.trim().equals("") || s.trim().equals("null");
}
}
<?xml version="1.0" encoding="UTF-8"?>
<animation-list xmlns:android="http://schemas.android.com/apk/res/android"
android:oneshot="false" >
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading1"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading2"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading3"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading4"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading5"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading6"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading7"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading8"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading9"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading10"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading11"
android:gravity="left" />
</item>
<item android:duration="150">
<clip
android:clipOrientation="horizontal"
android:drawable="@drawable/loading12"
android:gravity="left" />
</item>
</animation-list>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".FaceCompareActivity"
>
<FrameLayout
android:layout_width="match_parent"
android:layout_height="match_parent" >
<com.hand.face.view.CameraSurfaceView
android:id="@+id/camera_surfaceview"
android:layout_width="0dip"
android:layout_height="0dip" />
<com.hand.face.view.FaceView
android:id="@+id/face_view"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>
<com.hand.face.view.MaskView
android:id="@+id/mask"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<TextView
android:id="@+id/textv_face_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:textSize="20sp"
android:textColor="#000000"
android:layout_alignParentTop="true"
android:layout_marginTop="30dp"/>
<ImageButton
android:id="@+id/btn_switch"
android:layout_width="45dp"
android:layout_height="45dp"
android:layout_alignParentRight="true"
android:layout_marginRight="5dp"
android:layout_marginTop="5dp"
android:background="@drawable/ic_switch_camera" />
<!-- <ImageView
android:id="@+id/img"
android:layout_width="100dp"
android:layout_height="100dp"
android:layout_alignParentBottom="true"
android:src="@drawable/ic_face_find_2"/>-->
</RelativeLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/dialog_view"
android:orientation="vertical"
android:paddingTop="20dp"
android:paddingBottom="20dp"
android:paddingLeft="20dp"
android:paddingRight="20dp"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
<ProgressBar
android:layout_gravity="center"
android:indeterminateDrawable="@drawable/loading_drawable"
android:layout_width="50dp"
android:layout_height="50dp" />
<TextView
android:id="@+id/text"
android:layout_gravity="center"
android:visibility="gone"
android:textColor="@color/white"
android:layout_marginTop="10dp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
</LinearLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#3F51B5</color>
<color name="colorPrimaryDark">#303F9F</color>
<color name="colorAccent">#FF4081</color>
<color name="loading_dialog_bg">#4F000000</color>
<color name="white">#FFFFFF</color>
<color name="black">#000000</color>
</resources>
<resources>
<!-- Base application theme. -->
<style name="loading_dialog" parent="android:style/Theme.Dialog">
<item name="android:windowFrame">@null</item>
<item name="android:windowNoTitle">true</item>
<item name="android:windowBackground">@color/loading_dialog_bg</item>
<item name="android:windowIsFloating">true</item>
<item name="android:windowContentOverlay">@null</item>
</style>
<style name="Dialog_No_Border">
<item name="android:windowIsFloating">true</item>
<item name="android:windowBackground">@android:color/transparent</item>
</style>
</resources>
//
// AliyunOSSiOS.h
// AliyunOSSiOS
//
// Created by xuyecan on 28/11/2016.
// Copyright © 2016 xuyecan. All rights reserved.
//
#import <UIKit/UIKit.h>
//! Project version number for AliyunOSSiOS.
FOUNDATION_EXPORT double AliyunOSSiOSVersionNumber;
//! Project version string for AliyunOSSiOS.
FOUNDATION_EXPORT const unsigned char AliyunOSSiOSVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <AliyunOSSiOS/PublicHeader.h>
#import "OSSService.h"
#import "OSSCompat.h"
/*
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#import "OSSCancellationToken.h"
#import "OSSCancellationTokenRegistration.h"
#import "OSSCancellationTokenSource.h"
#import "OSSExecutor.h"
#import "OSSTask.h"
#import "OSSTaskCompletionSource.h"
NS_ASSUME_NONNULL_BEGIN
/**
A string containing the version of the Bolts Framework used by the current application.
*/
extern NSString *const OSSBoltsFrameworkVersionString;
NS_ASSUME_NONNULL_END
/*
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#import <Foundation/Foundation.h>
#import "OSSCancellationTokenRegistration.h"
NS_ASSUME_NONNULL_BEGIN
/*!
A block that will be called when a token is cancelled.
*/
typedef void(^OSSCancellationBlock)();
/*!
The consumer view of a CancellationToken.
Propagates notification that operations should be canceled.
A OSSCancellationToken has methods to inspect whether the token has been cancelled.
*/
@interface OSSCancellationToken : NSObject
/*!
Whether cancellation has been requested for this token source.
*/
@property (nonatomic, assign, readonly, getter=isCancellationRequested) BOOL cancellationRequested;
/*!
Register a block to be notified when the token is cancelled.
If the token is already cancelled the delegate will be notified immediately.
*/
- (OSSCancellationTokenRegistration *)registerCancellationObserverWithBlock:(OSSCancellationBlock)block;
@end
NS_ASSUME_NONNULL_END
/*
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/*!
Represents the registration of a cancellation observer with a cancellation token.
Can be used to unregister the observer at a later time.
*/
@interface OSSCancellationTokenRegistration : NSObject
/*!
Removes the cancellation observer registered with the token
and releases all resources associated with this registration.
*/
- (void)dispose;
@end
NS_ASSUME_NONNULL_END
/*
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@class OSSCancellationToken;
/*!
OSSCancellationTokenSource represents the producer side of a CancellationToken.
Signals to a CancellationToken that it should be canceled.
It is a cancellation token that also has methods
for changing the state of a token by cancelling it.
*/
@interface OSSCancellationTokenSource : NSObject
/*!
Creates a new cancellation token source.
*/
+ (instancetype)cancellationTokenSource;
/*!
The cancellation token associated with this CancellationTokenSource.
*/
@property (nonatomic, strong, readonly) OSSCancellationToken *token;
/*!
Whether cancellation has been requested for this token source.
*/
@property (nonatomic, assign, readonly, getter=isCancellationRequested) BOOL cancellationRequested;
/*!
Cancels the token if it has not already been cancelled.
*/
- (void)cancel;
/*!
Schedules a cancel operation on this CancellationTokenSource after the specified number of milliseconds.
@param millis The number of milliseconds to wait before completing the returned task.
If delay is `0` the cancel is executed immediately. If delay is `-1` any scheduled cancellation is stopped.
*/
- (void)cancelAfterDelay:(int)millis;
/*!
Releases all resources associated with this token source,
including disposing of all registrations.
*/
- (void)dispose;
@end
NS_ASSUME_NONNULL_END
//
// OSSClient.h
// oss_ios_sdk
//
// Created by zhouzhuo on 8/16/15.
// Copyright (c) 2015 aliyun.com. All rights reserved.
//
#import <Foundation/Foundation.h>
@class OSSGetServiceRequest;
@class OSSCreateBucketRequest;
@class OSSDeleteBucketRequest;
@class OSSHeadObjectRequest;
@class OSSGetBucketRequest;
@class OSSGetBucketACLRequest;
@class OSSGetObjectRequest;
@class OSSPutObjectRequest;
@class OSSPutObjectACLRequest;
@class OSSDeleteObjectRequest;
@class OSSCopyObjectRequest;
@class OSSInitMultipartUploadRequest;
@class OSSUploadPartRequest;
@class OSSCompleteMultipartUploadRequest;
@class OSSListPartsRequest;
@class OSSAbortMultipartUploadRequest;
@class OSSAppendObjectRequest;
@class OSSResumableUploadRequest;
@class OSSTask;
@class OSSExecutor;
@class OSSNetworking;
@class OSSClientConfiguration;
@protocol OSSCredentialProvider;
NS_ASSUME_NONNULL_BEGIN
/**
 OSSClient is the iOS client for the OSS service. It provides callers with a set of methods for interacting with OSS.
 In general, keeping a single OSSClient instance for the whole application is enough to invoke all operations.
*/
@interface OSSClient : NSObject
/**
 The OSS endpoint (access domain).
*/
@property (nonatomic, strong) NSString * endpoint;
/**
 Handles sending and receiving network requests.
*/
@property (nonatomic, strong) OSSNetworking * networking;
/**
 Provides the credentials required for access.
*/
@property (nonatomic, strong) id<OSSCredentialProvider> credentialProvider;
/**
 Client-side configuration.
*/
@property (nonatomic, strong) OSSClientConfiguration * clientConfiguration;
/**
 Task execution queue.
*/
@property (nonatomic, strong, readonly) OSSExecutor * ossOperationExecutor;
/**
 Initializes an OSSClient with the default local settings.
 @endpoint The region domain where the Bucket resides. Since 2017 Apple requires apps to comply with the ATS policy, so use an https endpoint here, e.g. "https://oss-cn-hangzhou.aliyuncs.com"
 @credentialProvider The signer (credential provider) you need to implement
*/
- (instancetype)initWithEndpoint:(NSString *)endpoint
credentialProvider:(id<OSSCredentialProvider>) credentialProvider;
/**
 Initializes an OSSClient with custom settings.
 @endpoint The region domain where the Bucket resides. Since 2017 Apple requires apps to comply with the ATS policy, so use an https endpoint here, e.g. "https://oss-cn-hangzhou.aliyuncs.com"
 @credentialProvider The signer (credential provider) you need to implement
 @conf Local settings such as the retry count and timeouts
*/
- (instancetype)initWithEndpoint:(NSString *)endpoint
credentialProvider:(id<OSSCredentialProvider>)credentialProvider
clientConfiguration:(OSSClientConfiguration *)conf;
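/**
 Usage sketch (illustrative, not part of this header): constructing a shared client.
 The concrete credential provider is application-specific; `provider` below is an assumed
 placeholder for any object conforming to OSSCredentialProvider.

     id<OSSCredentialProvider> provider = ...; // e.g. an STS-token based signer supplied by your app
     OSSClientConfiguration *conf = [OSSClientConfiguration new];
     OSSClient *client = [[OSSClient alloc] initWithEndpoint:@"https://oss-cn-hangzhou.aliyuncs.com"
                                          credentialProvider:provider
                                         clientConfiguration:conf];
 */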
#pragma mark restful-api
/**
 Corresponding RESTful API: GetService
 Lists all Buckets currently owned by the requester.
 Notes:
 1. STS is not supported yet.
 2. When all buckets are returned, the response XML contains no Prefix, Marker, MaxKeys, IsTruncated or NextMarker nodes; if part of the result is still pending, these nodes are added, and NextMarker should be used as the marker of the next query.
*/
- (OSSTask *)getService:(OSSGetServiceRequest *)request;
/**
 Corresponding RESTful API: PutBucket
 Creates a Bucket (anonymous access is not supported). By default the Bucket is created in the default data center: oss-cn-hangzhou.
 You can explicitly specify the data center the Bucket resides in to optimize latency, minimize cost or meet regulatory requirements.
 Note:
 1. STS is not supported yet.
*/
- (OSSTask *)createBucket:(OSSCreateBucketRequest *)request;
/**
 Corresponding RESTful API: DeleteBucket
 Deletes a Bucket.
*/
- (OSSTask *)deleteBucket:(OSSDeleteBucketRequest *)request;
/**
 Corresponding RESTful API: GetBucket
 Lists the information of all Objects in a Bucket. The listing can be constrained with prefix, marker, delimiter and max-keys to return a partial result.
*/
- (OSSTask *)getBucket:(OSSGetBucketRequest *)request;
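/**
 Usage sketch (illustrative, not part of this header): listing objects under a prefix.
 The request fields `bucketName`, `prefix` and `maxKeys` are assumed here from the
 corresponding RESTful parameters; verify them against OSSGetBucketRequest.

     OSSGetBucketRequest *list = [OSSGetBucketRequest new];
     list.bucketName = @"my-bucket";
     list.prefix = @"photos/";
     list.maxKeys = 100;
     [[client getBucket:list] continueWithBlock:^id(OSSTask *task) {
         if (!task.error) {
             // task.result describes the matched objects (partial if IsTruncated)
         }
         return nil;
     }];
 */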
/**
 Corresponding RESTful API: GetBucketACL
 Gets the access permission (ACL) of a Bucket.
*/
- (OSSTask *)getBucketACL:(OSSGetBucketACLRequest *)request;
/**
 Corresponding RESTful API: HeadObject
 Returns only the meta information of an Object, without the file content.
*/
- (OSSTask *)headObject:(OSSHeadObjectRequest *)request;
/**
 Corresponding RESTful API: GetObject
 Downloads an Object; the caller must have read permission on it.
*/
- (OSSTask *)getObject:(OSSGetObjectRequest *)request;
/**
 Corresponding RESTful API: PutObject
 Uploads a file.
*/
- (OSSTask *)putObject:(OSSPutObjectRequest *)request;
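/**
 Usage sketch (illustrative, not part of this header): uploading a small in-memory payload.
 The fields `bucketName`, `objectKey` and `uploadingData` are assumed from OSSPutObjectRequest;
 check that header for the exact interface.

     OSSPutObjectRequest *put = [OSSPutObjectRequest new];
     put.bucketName = @"my-bucket";
     put.objectKey = @"folder/hello.txt";
     put.uploadingData = [@"hello oss" dataUsingEncoding:NSUTF8StringEncoding];
     [[client putObject:put] continueWithBlock:^id(OSSTask *task) {
         if (task.error) {
             NSLog(@"upload failed: %@", task.error);
         }
         return nil;
     }];
 */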
/**
 The Put Object ACL interface modifies the access permission of an Object. An Object currently has three access levels: private, public-read and public-read-write.
 The ACL is set via the "x-oss-object-acl" header of the PUT request, and only the Bucket owner may perform this operation. On success 200 is returned; otherwise the corresponding error code and message are returned.
*/
- (OSSTask *)putObjectACL:(OSSPutObjectACLRequest *)request;
/**
 Corresponding RESTful API: AppendObject
 Uploads a file in append mode. An Object created by Append Object is an Appendable Object, whereas an Object uploaded by Put Object is a Normal Object.
*/
- (OSSTask *)appendObject:(OSSAppendObjectRequest *)request;
/**
 Corresponding RESTful API: CopyObject
 Copies an existing object on OSS to another object by sending a PUT request with the "x-oss-copy-source" header specifying the copy source.
 OSS recognizes this as a Copy operation and performs it entirely on the server side. If the copy succeeds, the information of the new object is returned.
 This operation is suitable for copying files smaller than 1GB.
*/
- (OSSTask *)copyObject:(OSSCopyObjectRequest *)request;
/**
 Corresponding RESTful API: DeleteObject
 Deletes an Object.
*/
- (OSSTask *)deleteObject:(OSSDeleteObjectRequest *)request;
/**
 Corresponding RESTful API: InitiateMultipartUpload
 Before transferring data in Multipart Upload mode, this interface must be called to ask OSS to initialize a Multipart Upload event. It returns a globally unique Upload ID created by the OSS server that identifies this Multipart Upload event.
 The ID can then be used for related operations, such as aborting or querying the Multipart Upload.
*/
- (OSSTask *)multipartUploadInit:(OSSInitMultipartUploadRequest *)request;
/**
 Corresponding RESTful API: UploadPart
 After a Multipart Upload has been initialized, data can be uploaded in parts using the given Object name and Upload ID.
 Each uploaded Part is identified by a part number in the range 1~10,000.
 For a given Upload ID, the number not only uniquely identifies that piece of data but also marks its relative position within the whole file.
 If new data is uploaded with an existing part number, the Part already stored on OSS under that number is overwritten. Except for the last Part, each part must be at least 100KB;
 the last Part has no size limit.
*/
- (OSSTask *)uploadPart:(OSSUploadPartRequest *)request;
/**
 Corresponding RESTful API: CompleteMultipartUpload
 After every data Part has been uploaded, the Complete Multipart Upload API must be called to finish the Multipart Upload of the whole file.
 When performing this operation, the caller must provide the list of all valid data Parts (including part number and ETag); after receiving the list, OSS verifies the validity of each Part one by one.
 Once all data Parts pass verification, OSS assembles them into a complete Object.
*/
- (OSSTask *)completeMultipartUpload:(OSSCompleteMultipartUploadRequest *)request;
/**
 Corresponding RESTful API: ListParts
 Lists all Parts that have been successfully uploaded under the given Upload ID.
*/
- (OSSTask *)listParts:(OSSListPartsRequest *)request;
/**
 Corresponding RESTful API: AbortMultipartUpload
 Aborts the Multipart Upload event corresponding to the given Upload ID.
 Once a Multipart Upload event has been aborted, the Upload ID can no longer be used for any operation, and the Parts already uploaded are deleted.
*/
- (OSSTask *)abortMultipartUpload:(OSSAbortMultipartUploadRequest *)request;
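/**
 Usage sketch of the multipart flow described above (illustrative only). The request field
 names beyond `bucketName`/`objectKey` are assumptions inferred from the RESTful parameters;
 verify them against the individual request headers before use.

     OSSInitMultipartUploadRequest *init = [OSSInitMultipartUploadRequest new];
     init.bucketName = @"my-bucket";
     init.objectKey = @"big-file.zip";
     OSSTask *initTask = [client multipartUploadInit:init];
     [initTask waitUntilFinished];
     // 1. keep the UploadId returned by the init call
     // 2. call uploadPart: once per part (part numbers 1~10,000), recording each returned ETag
     // 3. call completeMultipartUpload: with the full (part number, ETag) list to assemble the Object
     // 4. or call abortMultipartUpload: instead to cancel and discard the uploaded Parts
 */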
#pragma mark extention method
/**
 Signs a URL for an Object; the URL can be handed to a third party for authorized access.
 @bucketName Name of the Bucket the Object belongs to
 @objectKey Name of the Object
 @interval Validity period of the signed URL, in seconds; for example, pass 3600 for a URL valid for one hour
*/
- (OSSTask *)presignConstrainURLWithBucketName:(NSString *)bucketName
withObjectKey:(NSString *)objectKey
withExpirationInterval:(NSTimeInterval)interval;
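/**
 Usage sketch (illustrative, not part of this header): signing a URL valid for one hour.
 It assumes the task result carries the signed URL string.

     OSSTask *task = [client presignConstrainURLWithBucketName:@"my-bucket"
                                                 withObjectKey:@"folder/hello.txt"
                                        withExpirationInterval:60 * 60];
     [task waitUntilFinished];
     if (!task.error) {
         NSString *signedURL = task.result; // hand this URL to a third party for temporary access
     }
 */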
/**
 If the Object's ACL is public-read or public-read-write, this interface produces a URL for the Object that can be handed to a third party for access.
 @bucketName Name of the Bucket the Object belongs to
 @objectKey Name of the Object
*/
- (OSSTask *)presignPublicURLWithBucketName:(NSString *)bucketName
withObjectKey:(NSString *)objectKey;
/**
 Resumable upload interface.
 This interface wraps several multipart-upload interfaces to implement resumable upload, but the caller must persist the UploadId.
 For a new file, first call multipartUploadInit to obtain an UploadId, then call this interface to upload the file.
 If the upload fails, check the failure reason first:
 if the failure is recoverable, call this interface again with the same UploadId and the same file to resume;
 otherwise, obtain a new UploadId and upload the file from scratch.
 See the demo for details.
*/
- (OSSTask *)resumableUpload:(OSSResumableUploadRequest *)request;
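/**
 Usage sketch of the resumable-upload flow described above (illustrative only). The field names
 `uploadId` and `uploadingFileURL` are assumptions; consult OSSResumableUploadRequest for the
 exact interface, and persist the UploadId yourself so a failed upload can be resumed.

     // first obtain an UploadId via multipartUploadInit and store it, then:
     OSSResumableUploadRequest *resume = [OSSResumableUploadRequest new];
     resume.bucketName = @"my-bucket";
     resume.objectKey = @"big-file.zip";
     resume.uploadId = storedUploadId;   // reuse the same id when retrying a recoverable failure
     resume.uploadingFileURL = [NSURL fileURLWithPath:@"/path/to/big-file.zip"];
     [[client resumableUpload:resume] continueWithBlock:^id(OSSTask *task) {
         // on a recoverable failure, call resumableUpload: again with the same UploadId;
         // otherwise request a new UploadId and start over
         return nil;
     }];
 */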
/**
 Checks whether an Object exists.
 @bucketName Name of the Bucket the Object belongs to
 @objectKey Name of the Object
 return YES                      the Object exists
 return NO && *error == nil      the Object does not exist
 return NO && *error != nil      an error occurred
*/
- (BOOL)doesObjectExistInBucket:(NSString *)bucketName
objectKey:(NSString *)objectKey
error:(const NSError **)error;
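/**
 Usage sketch (illustrative, not part of this header): distinguishing the three outcomes.

     NSError *error = nil;
     BOOL exists = [client doesObjectExistInBucket:@"my-bucket"
                                         objectKey:@"folder/hello.txt"
                                             error:&error];
     if (exists) {
         // the Object exists
     } else if (error == nil) {
         // the Object does not exist
     } else {
         // the check itself failed; inspect `error`
     }
 */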
@end
NS_ASSUME_NONNULL_END
//
// OSSCompat.h
// oss_ios_sdk_new
//
// Created by zhouzhuo on 9/10/15.
// Copyright (c) 2015 aliyun.com. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "OSSService.h"
@class OSSCancellationTokenSource;
typedef OSSCancellationTokenSource OSSTaskHandler;
@interface OSSClient (Compat)
/**
 Data upload interface kept for compatibility with older versions.
 Recommended replacement: putObject
*/
- (OSSTaskHandler *)uploadData:(NSData *)data
withContentType:(NSString *)contentType
withObjectMeta:(NSDictionary *)meta
toBucketName:(NSString *)bucketName
toObjectKey:(NSString *)objectKey
onCompleted:(void(^)(BOOL, NSError *))onCompleted
onProgress:(void(^)(float progress))onProgress;
/**
 Data download interface kept for compatibility with older versions.
 Recommended replacement: getObject
*/
- (OSSTaskHandler *)downloadToDataFromBucket:(NSString *)bucketName
objectKey:(NSString *)objectKey
onCompleted:(void(^)(NSData *, NSError *))onCompleted
onProgress:(void(^)(float progress))onProgress;
/**
 File upload interface kept for compatibility with older versions.
 Recommended replacement: putObject
*/
- (OSSTaskHandler *)uploadFile:(NSString *)filePath
withContentType:(NSString *)contentType
withObjectMeta:(NSDictionary *)meta
toBucketName:(NSString *)bucketName
toObjectKey:(NSString *)objectKey
onCompleted:(void(^)(BOOL, NSError *))onCompleted
onProgress:(void(^)(float progress))onProgress;
/**
 File download interface kept for compatibility with older versions.
 Recommended replacement: getObject
*/
- (OSSTaskHandler *)downloadToFileFromBucket:(NSString *)bucketName
objectKey:(NSString *)objectKey
toFile:(NSString *)filePath
onCompleted:(void(^)(BOOL, NSError *))onCompleted
onProgress:(void(^)(float progress))onProgress;
/**
 Resumable file upload interface kept for compatibility with older versions.
 Recommended replacement: resumableUpload
*/
- (OSSTaskHandler *)resumableUploadFile:(NSString *)filePath
withContentType:(NSString *)contentType
withObjectMeta:(NSDictionary *)meta
toBucketName:(NSString *)bucketName
toObjectKey:(NSString *)objectKey
onCompleted:(void(^)(BOOL, NSError *))onCompleted
onProgress:(void(^)(float progress))onProgress;
/**
 Object deletion interface kept for compatibility with older versions.
 Recommended replacement: deleteObject
*/
- (void)deleteObjectInBucket:(NSString *)bucketName
objectKey:(NSString *)objectKey
onCompleted:(void(^)(BOOL, NSError *))onCompleted;
@end
//
// OSSDefine.h
// AliyunOSSiOS
//
// Created by zhouzhuo on 5/1/16.
// Copyright © 2016 zhouzhuo. All rights reserved.
//
#import <Foundation/Foundation.h>
#ifndef OSSDefine_h
#define OSSDefine_h
#define OSSUAPrefix @"aliyun-sdk-ios"
#define OSSSDKVersion @"2.6.0"
#define OSSListBucketResultXMLTOKEN @"ListBucketResult"
#define OSSNameXMLTOKEN @"Name"
#define OSSDelimiterXMLTOKEN @"Delimiter"
#define OSSMarkerXMLTOKEN @"Marker"
#define OSSNextMarkerXMLTOKEN @"NextMarker"
#define OSSMaxKeysXMLTOKEN @"MaxKeys"
#define OSSIsTruncatedXMLTOKEN @"IsTruncated"
#define OSSContentsXMLTOKEN @"Contents"
#define OSSKeyXMLTOKEN @"Key"
#define OSSLastModifiedXMLTOKEN @"LastModified"
#define OSSETagXMLTOKEN @"ETag"
#define OSSTypeXMLTOKEN @"Type"
#define OSSSizeXMLTOKEN @"Size"
#define OSSStorageClassXMLTOKEN @"StorageClass"
#define OSSCommonPrefixesXMLTOKEN @"CommonPrefixes"
#define OSSOwnerXMLTOKEN @"Owner"
#define OSSAccessControlListXMLTOKEN @"AccessControlList"
#define OSSGrantXMLTOKEN @"Grant"
#define OSSIDXMLTOKEN @"ID"
#define OSSDisplayNameXMLTOKEN @"DisplayName"
#define OSSBucketsXMLTOKEN @"Buckets"
#define OSSBucketXMLTOKEN @"Bucket"
#define OSSCreationDate @"CreationDate"
#define OSSPrefixXMLTOKEN @"Prefix"
#define OSSUploadIdXMLTOKEN @"UploadId"
#define OSSLocationXMLTOKEN @"Location"
#define OSSNextPartNumberMarkerXMLTOKEN @"NextPartNumberMarker"
#define OSSMaxPartsXMLTOKEN @"MaxParts"
#define OSSPartXMLTOKEN @"Part"
#define OSSPartNumberXMLTOKEN @"PartNumber"
#define OSSClientErrorDomain @"com.aliyun.oss.clientError"
#define OSSServerErrorDomain @"com.aliyun.oss.serverError"
#define OSSErrorMessageTOKEN @"ErrorMessage"
#define OSSHttpHeaderContentDisposition @"Content-Disposition"
#define OSSHttpHeaderXOSSCallback @"x-oss-callback"
#define OSSHttpHeaderXOSSCallbackVar @"x-oss-callback-var"
#define OSSHttpHeaderContentEncoding @"Content-Encoding"
#define OSSHttpHeaderContentType @"Content-Type"
#define OSSHttpHeaderContentMD5 @"Content-MD5"
#define OSSHttpHeaderCacheControl @"Cache-Control"
#define OSSHttpHeaderExpires @"Expires"
#define OSSDefaultRetryCount 3
#define OSSDefaultMaxConcurrentNum 5
#define OSSDefaultTimeoutForRequestInSecond 15
#define OSSDefaultTimeoutForResourceInSecond 7 * 24 * 60 * 60
#endif /* OSSDefine_h */
/*
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/*!
An object that can run a given block.
*/
@interface OSSExecutor : NSObject
/*!
Returns a default executor, which runs continuations immediately until the call stack gets too
deep, then dispatches to a new GCD queue.
*/
+ (instancetype)defaultExecutor;
/*!
Returns an executor that runs continuations on the thread where the previous task was completed.
*/
+ (instancetype)immediateExecutor;
/*!
Returns an executor that runs continuations on the main thread.
*/
+ (instancetype)mainThreadExecutor;
/*!
Returns a new executor that uses the given block to execute continuations.
@param block The block to use.
*/
+ (instancetype)executorWithBlock:(void(^)(void(^block)()))block;
/*!
Returns a new executor that runs continuations on the given queue.
@param queue The instance of `dispatch_queue_t` to dispatch all continuations onto.
*/
+ (instancetype)executorWithDispatchQueue:(dispatch_queue_t)queue;
/*!
Returns a new executor that runs continuations on the given queue.
@param queue The instance of `NSOperationQueue` to run all continuations on.
*/
+ (instancetype)executorWithOperationQueue:(NSOperationQueue *)queue;
/*!
Runs the given block using this executor's particular strategy.
@param block The block to execute.
*/
- (void)execute:(void(^)())block;
@end
NS_ASSUME_NONNULL_END
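// Usage sketch for the executor declared above (illustrative only; `OSSDemoExecutor`
// and the queue label are assumptions, not part of the SDK).
static void OSSDemoExecutor(void) {
    // Run continuations on a dedicated serial queue instead of the default strategy.
    dispatch_queue_t queue = dispatch_queue_create("com.example.oss.callbacks", DISPATCH_QUEUE_SERIAL);
    OSSExecutor *executor = [OSSExecutor executorWithDispatchQueue:queue];
    [executor execute:^{
        NSLog(@"running on the dedicated callback queue");
    }];
}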
//
// OSSLog.h
// oss_ios_sdk
//
// Created by zhouzhuo on 8/16/15.
// Copyright (c) 2015 aliyun.com. All rights reserved.
//
#import <Foundation/Foundation.h>
// colorful log configuration
// see https://github.com/robbiehanson/XcodeColors
#define XCODE_COLORS_ESCAPE @"\033["
#define XCODE_COLORS_RESET_FG XCODE_COLORS_ESCAPE @"fg;" // Clear any foreground color
#define XCODE_COLORS_RESET_BG XCODE_COLORS_ESCAPE @"bg;" // Clear any background color
#define XCODE_COLORS_RESET XCODE_COLORS_ESCAPE @";" // Clear any foreground or background color
#define OSSLogVerbose(frmt, ...)\
if ([OSSLog isLogEnable]) {\
NSLog(@"[Verbose]: %@", [NSString stringWithFormat:(frmt), ##__VA_ARGS__]);\
}
#define OSSLogDebug(frmt, ...)\
if ([OSSLog isLogEnable]) {\
NSLog(@"[Debug]: %@", [NSString stringWithFormat:(frmt), ##__VA_ARGS__]);\
}
#define OSSLogError(frmt, ...)\
if ([OSSLog isLogEnable]) {\
NSLog(@"[Error]: %@", [NSString stringWithFormat:(frmt), ##__VA_ARGS__]);\
}
static BOOL isEnable;
@interface OSSLog : NSObject
+ (void)enableLog;
+ (void)disableLog;
+ (BOOL)isLogEnable;
@end
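// Usage sketch (illustrative only): the OSSLog* macros above emit via NSLog only while
// logging is enabled; `OSSDemoLogging` is an assumed helper for demonstration.
static void OSSDemoLogging(void) {
    [OSSLog enableLog];                           // turn on SDK logging (e.g. in debug builds)
    OSSLogDebug(@"uploading %@ ...", @"hello.txt");
    [OSSLog disableLog];                          // silence SDK logging again
}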