권주희

add file_server, CCTedV, whatsUp code to project

Showing 464 changed files with 13636 additions and 0 deletions
apply plugin: 'com.android.application'
android {
compileSdkVersion 28
defaultConfig {
applicationId "com.example.cctedv"
minSdkVersion 22
targetSdkVersion 28
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
packagingOptions {
exclude 'META-INF/DEPENDENCIES'
exclude 'META-INF/LICENSE'
exclude 'META-INF/LICENSE.txt'
exclude 'META-INF/license.txt'
exclude 'META-INF/NOTICE'
exclude 'META-INF/NOTICE.txt'
exclude 'META-INF/notice.txt'
exclude 'META-INF/ASL2.0'
exclude("META-INF/*.kotlin_module")
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
implementation project(path: ':libstreaming')
implementation 'com.google.android.material:material:1.0.0'
implementation("com.squareup.okhttp3:okhttp:4.6.0")
implementation 'com.squareup.mimecraft:mimecraft:1.1.1'
}
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
package com.example.cctedv;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.example.cctedv", appContext.getPackageName());
}
}
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.cctedv"
android:installLocation="auto" >
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:usesCleartextTraffic="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name=".RecordActivity" />
<activity android:name=".SetUserImgActivity" />
</application>
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.RECORD_AUDIO" android:required="true" />
<uses-permission android:name="android.permission.CAMERA" android:required="true" />
</manifest>
package com.example.cctedv;
import android.graphics.Bitmap;
public class ImgItem {
private String mFilename;
private Bitmap selectedImage;
public ImgItem() {
mFilename = "";
}
public String getmFilename() {
return mFilename;
}
public void setmFilename(String name) {
this.mFilename = name;
}
public Bitmap getSelectedImage() { return selectedImage; }
public void setSelectedImage(Bitmap bm) { this.selectedImage = bm; }
}
package com.example.cctedv;
import android.content.Context;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.ArrayList;
public class ImgListAdapter extends BaseAdapter {
private ArrayList<ImgItem> mImgList;
private LayoutInflater mInflater;
public ImgListAdapter(ArrayList<ImgItem> mImgList) {
this.mImgList = mImgList;
}
@Override
public int getCount() {
return mImgList.size();
}
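// getView() recycles convertView when one is available, then binds the thumbnail and
// file name for the row at the given position.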
@Override
public View getView(int position, View convertView, ViewGroup parent) {
final Context context = parent.getContext();
if (convertView == null) {
if (mInflater == null) {
mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
}
convertView = mInflater.inflate(R.layout.listview_img, parent, false);
}
ImageView imgView = convertView.findViewById(R.id.img_source);
TextView fileName = convertView.findViewById(R.id.list_file_name);
ImgItem file = mImgList.get(position);
Log.i("ITEM : ", file.getmFilename());
fileName.setText(file.getmFilename());
imgView.setImageBitmap(file.getSelectedImage());
convertView.setTag("" + position);
return convertView;
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public Object getItem(int position) {
return mImgList.get(position);
}
}
package com.example.cctedv;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import java.util.ArrayList;
public class MainActivity extends AppCompatActivity {
/*
* Main activity of the data-collection app.
* From this screen the user can open the image registration page, or register a user id and start collecting data.
* */
private static final int CAMERA_PERMISSION = 1;
private static final int REQ_RECORDING_PERMISSION = 1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
grantPermissions();
Button mButton;
Button imgActivity;
final EditText mEdit;
Button fab = findViewById(R.id.fab);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Log.i("MainActivity","화면 전환");
Intent intent = new Intent(MainActivity.this, RecordActivity.class);
startActivity(intent);
}
});
mEdit = (EditText)findViewById(R.id.userId);
mButton = (Button)findViewById(R.id.enroll_user);
mButton.setOnClickListener(
new View.OnClickListener()
{
public void onClick(View view)
{
Singleton.getInstance().setUserId(mEdit.getText().toString());
String url = "http://victoria.khunet.net:5900/user";
new NetworkTask(url, Singleton.getInstance().getUserId()).execute();
Log.v("UserId", Singleton.getInstance().getUserId());
}
});
imgActivity = (Button)findViewById(R.id.img_activity);
imgActivity.setOnClickListener(new View.OnClickListener()
{
public void onClick(View view) {
Intent intent = new Intent(MainActivity.this, SetUserImgActivity.class);
startActivity(intent);
}
});
}
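// Requests any missing storage/mic/camera runtime permissions. Returns true when all are
// already granted; otherwise shows the system permission dialog and returns false.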
public boolean grantPermissions() {
ArrayList<String> permissions_array = new ArrayList<>();
if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
permissions_array.add(Manifest.permission.WRITE_EXTERNAL_STORAGE);
permissions_array.add(Manifest.permission.READ_EXTERNAL_STORAGE);
}
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
permissions_array.add(Manifest.permission.RECORD_AUDIO);
}
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
permissions_array.add(Manifest.permission.CAMERA);
}
if(!permissions_array.isEmpty()) {
String[] permissions = new String[permissions_array.size()];
permissions_array.toArray(permissions);
// onRequestPermissionsResult() runs as the callback for this request
// A single request covers every missing permission (REQ_RECORDING_PERMISSION and CAMERA_PERMISSION share the same request code).
ActivityCompat.requestPermissions(this, permissions, REQ_RECORDING_PERMISSION);
return false;
} else {
return true;
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED){
Log.v(" ","Permission: " + permissions[0] + "was " + grantResults[0]);
if (requestCode == REQ_RECORDING_PERMISSION) {
for (int i = 0; i < grantResults.length ; i++) {
if (grantResults[i] < 0) {
Toast.makeText(MainActivity.this, "This permission must be enabled.",Toast.LENGTH_SHORT).show();
return;
}
}
Log.v("info : ","mic permission");
} else if (requestCode == CAMERA_PERMISSION) {
for (int i = 0; i < grantResults.length ; i++) {
if (grantResults[i] < 0) {
Toast.makeText(MainActivity.this, "This permission must be enabled.",Toast.LENGTH_SHORT).show();
return;
}
}
Log.v("info : ","mic permission");
}
}
}
}
package com.example.cctedv;
import android.os.AsyncTask;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import okhttp3.MultipartBody;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
public class NetworkTask extends AsyncTask<Void, Void, String> {
/*
* NetworkTask wraps HTTP calls to the server API.
* It is used to upload user frame data and to register a user id.
* */
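// Illustrative usage sketch (not in the original source), matching the two constructors below:
//   new NetworkTask(url, userId).execute();                           // register a user id
//   new NetworkTask(url, base64Frame, unitFile, timeStamp).execute(); // upload one frame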
private String url;
private String data;
private File mFiles = null;
private String mDate = null;
public NetworkTask(String url, String data) {
this.url = url;
this.data = data;
}
public NetworkTask(String url, String data, File mFiles, String mDate) {
this.url = url;
this.data = data;
this.mFiles = mFiles;
this.mDate = mDate;
}
@Override
protected String doInBackground(Void... params) {
if(mFiles == null) {
RequestBody requestBody = new MultipartBody.Builder().setType(MultipartBody.FORM)
.addFormDataPart("userId", data)
.addFormDataPart("userToken", "")
.build();
OkHttpClient client = new OkHttpClient();
Request request = new Request.Builder().url(url).post(requestBody).build();
Response response = null;
try {
response = client.newCall(request).execute();
} catch (IOException e) {
e.printStackTrace();
}
if (response != null)
Log.i("RES", response.toString());
} else {
Log.i("DATA SIZE ", String.valueOf(this.data.length()));
RequestBody requestBody = new MultipartBody.Builder().setType(MultipartBody.FORM)
.addFormDataPart("befEncoding", this.data)
.addFormDataPart("userId", Singleton.getInstance().getUserId())
.addFormDataPart("timeStamp", this.mDate)
.build();
OkHttpClient client = new OkHttpClient();
Request request = new Request.Builder().url(url).post(requestBody).build();
Response response = null;
try {
response = client.newCall(request).execute();
} catch (IOException e) {
e.printStackTrace();
}
if (response != null)
Log.i("RES", response.toString());
}
return "hello";
}
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
if(s != null)
Log.i("RESPONSE : ", s);
}
@Override
protected void onPreExecute() {
}
}
package com.example.cctedv;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Gravity;
import android.view.TextureView;
import android.widget.FrameLayout;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Base64;
import java.util.Date;
import java.util.TimeZone;
import androidx.annotation.RequiresApi;
public class RecordActivity extends Activity implements TextureView.SurfaceTextureListener {
private Camera mCamera;
private TextureView mTextureView;
private String mOutputFile; // output file path
private DateFormat mDateFormat;
private String mDate;
private String mUserName = "victoria";
public File directory;
private FileOutputStream mFileOutputStream;
private File mFiles;
private boolean isCameraOpen = false;
private int mUnitTime = 2000;
private int mRemainingFileSize;
/*
* RecordActivity uploads each user's frame data to the server once per unitTime interval.
* */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_record);
mTextureView = new TextureView(this);
mTextureView.setSurfaceTextureListener(this);
setContentView(mTextureView);
}
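// Prepares a new output file named <user>_<ISO-8601 timestamp> under /CCTedV and sets the
// byte budget for this unit: calculateGap() * 44100 * 2 (a 44.1 kHz, 16-bit-per-sample
// budget, seemingly carried over from an audio pipeline).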
public void settingVideoInfo() {
TimeZone mTimeZone = TimeZone.getDefault();
mDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
mDateFormat.setTimeZone(mTimeZone);
mDate = mDateFormat.format(new Date());
mOutputFile = Environment.getExternalStorageDirectory().getAbsolutePath() + "/CCTedV" + "/" + mUserName + "_" + mDate;
Log.i("PATH :: ", mOutputFile);
mFiles = new File(mOutputFile);
mRemainingFileSize = calculateGap(mDate)*44100*2;
try {
mFileOutputStream = new FileOutputStream(mOutputFile);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
public void makeDir(){
directory = new File(Environment.getExternalStorageDirectory() + File.separator + "CCTedV");
boolean success = true;
if (!directory.exists()) {
success = directory.mkdirs();
}
if (success) {
Log.v("FILE", "Directory is exist");
} else {
Log.e("FILE", "Directory not created");
}
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.i("hello", "aaa");
makeDir();
settingVideoInfo();
isCameraOpen = true;
mCamera = Camera.open();
Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
mTextureView.setLayoutParams(new FrameLayout.LayoutParams(
previewSize.width, previewSize.height, Gravity.CENTER));
try {
mCamera.setPreviewTexture(surface);
} catch (IOException t) {
Log.e("RecordActivity", "Failed to set preview texture", t);
}
mCamera.startPreview();
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@RequiresApi(api = Build.VERSION_CODES.O)
public void onPreviewFrame(final byte[] data, final Camera camera) {
if(isCameraOpen) {
if(!accumulateFile(data)) {
if (mFiles.exists()) {
try {
// Flush and close the finished unit file before uploading.
mFileOutputStream.flush();
mFileOutputStream.close();
mFileOutputStream = null;
mFiles.delete();
// Upload endpoint.
String url = "http://victoria.khunet.net:5900/upload";
mDate = mDateFormat.format(new Date());
File photo=new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/CCTedV" + "/" + "img" + "_" + mDate+".jpeg");
Bitmap bmp = mTextureView.getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 100, stream);
byte[] currentData = stream.toByteArray();
String s = Base64.getEncoder().encodeToString(currentData);
// Send the frame to the server on a background thread via AsyncTask.
(new NetworkTask(url, s, mFiles, mDate)).execute();
photo.delete();
} catch (IOException e) {
e.printStackTrace();
}
// Start the next unit.
settingVideoInfo();
}
}
}
// Process the contents of byte for whatever you need
}
});
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
isCameraOpen = false;
try {
if (mFileOutputStream != null) {
mFileOutputStream.flush();
mFileOutputStream.close();
mFileOutputStream = null;
}
} catch (IOException e) {
e.printStackTrace();
}
mCamera.stopPreview();
mCamera.release();
return true;
}
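// Appends one preview frame to the open unit file. Returns true after a successful write
// while the unit's byte budget remains; returns false once the budget is spent (or on a
// null frame / I/O error), signalling the caller to upload and rotate the file.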
private boolean accumulateFile(byte[] byteBuffer) {
try {
if(mRemainingFileSize >0) {
if(byteBuffer != null) {
mFileOutputStream.write(byteBuffer);
mRemainingFileSize -= byteBuffer.length * 2;
return true;
} else {
// Toast.makeText(RecorderService.this, "Playing...", Toast.LENGTH_SHORT).show();
}
}
else {
mFileOutputStream.close();
}
}catch (IOException e){
Log.e("file out" , e.toString());
}
return false;
}
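// Remaining amount until the next mUnitTime boundary, computed from the HH:mm:ss part of
// the timestamp (seconds since midnight). Example: mUnitTime = 2000, timeInSecond = 4500
// -> 2000 - (4500 % 2000) = 1500.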
private int calculateGap(String date) {
String time = date.substring(11, date.length());
final int hour = Integer.parseInt(time.substring(0, 2));
final int min = Integer.parseInt(time.substring(3, 5));
final int sec = Integer.parseInt(time.substring(6, time.length()));
final int timeInSecond = hour * 3600 + min * 60 + sec;
final int gap = mUnitTime - (timeInSecond % mUnitTime);
return gap;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
}
package com.example.cctedv;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Toast;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.ArrayList;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
public class SetUserImgActivity extends AppCompatActivity {
private ListView mListView;
private ImgListAdapter mAdapter;
private ArrayList<ImgItem> mImgList = null;
private Button mImgAddButton;
private Button uploadButton;
private static final int RESULT_LOAD_IMG = 1;
InputStream imageStream;
EditText mItemName;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_img);
mListView = findViewById(R.id.list_view);
mImgAddButton = (Button)findViewById(R.id.img_upload);
mImgList = new ArrayList<>();
ImgItem t = new ImgItem();
// t.setmFilename("");
mImgList.add(t);
mAdapter = new ImgListAdapter(mImgList);
mListView.setAdapter(mAdapter);
mImgAddButton.setOnClickListener(new View.OnClickListener()
{
public void onClick(View view) {
openBuilder();
}
});
}
// @Override
// protected void onActivityResult(int reqCode, int resultCode, Intent data) {
// super.onActivityResult(reqCode, resultCode, data);
// if (resultCode == RESULT_OK) {
// try {
// final Uri imageUri = data.getData();
// imageStream = getContentResolver().openInputStream(imageUri);
//
// } catch (FileNotFoundException e) {
// e.printStackTrace();
//// Toast.makeText(PostImage.this, "Something went wrong", Toast.LENGTH_LONG).show();
// }
//
// }else {
//// Toast.makeText(PostImage.this, "You haven't picked Image",Toast.LENGTH_LONG).show();
// }
// }
public void openBuilder() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
// Get the layout inflater
LayoutInflater inflater = this.getLayoutInflater();
View view = inflater.inflate(R.layout.dialog_img_upload, null);
// AlertDialog alert = builder.create();
// mItemName = (EditText) view.findViewById(R.id.filename);
// uploadButton = (Button) view.findViewById(R.id.select_img);
// uploadButton.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// Log.i("??","!");
// Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
// photoPickerIntent.setType("image/*");
// startActivityForResult(photoPickerIntent, RESULT_LOAD_IMG);
// }
// });
builder.setView(view)
// Add action buttons
.setNegativeButton("UPLOAD", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
// Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
// photoPickerIntent.setType("image/*");
// startActivityForResult(photoPickerIntent, RESULT_LOAD_IMG);
Intent i = new Intent(
Intent.ACTION_PICK,
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(i, RESULT_LOAD_IMG);
}
});
// builder.setView(inflater.inflate(R.layout.dialog_img_upload, null))
// // Add action buttons
// .setPositiveButton("OK", new DialogInterface.OnClickListener() {
// public void onClick(DialogInterface dialog, int id) {
// ImgItem t = new ImgItem();
//// t.setmFilename(mItemName.getText().toString());
//// Log.i("name : ", mItemName.getText().toString());
// final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
// t.setSelectedImage(selectedImage);
// Log.i("?", selectedImage.toString());
// mImgList.add(t);
// mAdapter = new ImgListAdapter(mImgList);
//
// mListView.setAdapter(mAdapter);
// }
// });
builder.show();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == RESULT_LOAD_IMG && resultCode == RESULT_OK && null != data) {
Uri selectedImage = data.getData();
String[] filePathColumn = { MediaStore.Images.Media.DATA };
Cursor cursor = getContentResolver().query(selectedImage,
filePathColumn, null, null, null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
String picturePath = cursor.getString(columnIndex);
cursor.close();
ImgItem t = new ImgItem();
// t.setmFilename(mItemName.getText().toString());
// Log.i("name : ", mItemName.getText().toString());
Bitmap selectedImg = BitmapFactory.decodeFile(picturePath);
t.setSelectedImage(selectedImg);
mImgList.add(t);
mAdapter = new ImgListAdapter(mImgList);
mListView.setAdapter(mAdapter);
// ImageView imageView = (ImageView) findViewById(R.id.imgView);
// imageView.setImageBitmap(BitmapFactory.decodeFile(picturePath));
}
}
@Override
public void onResume() {
super.onResume();
mAdapter = new ImgListAdapter(mImgList);
Log.d("size", String.valueOf(mImgList.size()));
mListView.setAdapter(mAdapter);
}
}
package com.example.cctedv;
public class Singleton {
private String userId;
public String getUserId()
{
return userId;
}
public void setUserId(String data)
{
this.userId = data;
}
private static Singleton instance = null;
/*
* This Singleton allocates its one instance the first time it is requested (static) and
* reuses that instance for the lifetime of the application.
* It is used to store and access the user's information.
* */
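// Illustrative usage (not in the original source):
//   Singleton.getInstance().setUserId("victoria");
//   String id = Singleton.getInstance().getUserId();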
public static synchronized Singleton getInstance(){
if(null == instance){
instance = new Singleton();
}
return instance;
}
}
<vector android:height="24dp" android:tint="#FFFFFF"
android:viewportHeight="24.0" android:viewportWidth="24.0"
android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#010101" android:pathData="M12,12m-8,0a8,8 0,1 1,16 0a8,8 0,1 1,-16 0"/>
</vector>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:tools="http://schemas.android.com/tools">
<LinearLayout
android:id="@+id/rl_down"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".SetUserImgActivity"
android:orientation="vertical"
android:background="@drawable/pic_back02"
tools:ignore="MissingConstraints">
<Button
android:id="@+id/img_upload"
android:layout_width="137dp"
android:layout_height="137dp"
android:layout_marginTop="40dp"
android:layout_gravity="center"
android:background="@drawable/btn_upload"
/>
<ListView
android:id="@+id/list_view"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</LinearLayout>
</androidx.constraintlayout.widget.ConstraintLayout>
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<LinearLayout
android:id="@+id/rl"
android:layout_width="match_parent"
android:layout_height="560dp"
android:background="@drawable/pic_back"
android:orientation="vertical"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
tools:context=".MainActivity">
<LinearLayout
android:id="@+id/rl_up"
android:layout_width="match_parent"
android:layout_height="300dp"
android:orientation="vertical"
tools:context=".MainActivity">
<TextView
android:layout_width="315dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginTop="45dp"
android:fontFamily="@res/font/nanumsquarer"
android:gravity="left"
android:text="User ID Setting"
android:textColor="#FFFFFF"
android:textSize="24dp"
/>
<EditText
android:id="@+id/userId"
android:layout_width="315dp"
android:layout_height="56dp"
android:layout_gravity="center"
android:layout_marginTop="45dp"
android:backgroundTint="#FFFFFF"/>
<Button
android:id="@+id/enroll_user"
android:layout_width="315dp"
android:layout_height="56dp"
android:layout_gravity="center"
android:layout_marginTop="45dp"
android:background="@drawable/btn_enroll" />
</LinearLayout>
<LinearLayout
android:id="@+id/rl_down"
android:layout_width="match_parent"
android:layout_height="260dp"
android:orientation="vertical"
tools:context=".MainActivity">
<TextView
android:layout_width="315dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginTop="20dp"
android:fontFamily="@res/font/nanumsquarer"
android:gravity="center"
android:text="Recording"
android:textColor="#FFFFFF"
android:textSize="30dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
android:gravity="bottom"
android:paddingTop="10dp"
android:layout_marginTop="23dp"
android:orientation="horizontal">
<androidx.constraintlayout.widget.ConstraintLayout
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:gravity="center"
android:orientation="vertical">
</androidx.constraintlayout.widget.ConstraintLayout>
<androidx.constraintlayout.widget.ConstraintLayout
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="2"
android:gravity="center"
android:orientation="vertical">
<Button
android:id="@+id/fab"
android:layout_width="137dp"
android:layout_height="137dp"
android:layout_gravity="center"
android:background="@drawable/btn_cam"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
tools:layout_editor_absoluteY="0dp" />
</androidx.constraintlayout.widget.ConstraintLayout>
<androidx.constraintlayout.widget.ConstraintLayout
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:gravity="center"
android:orientation="vertical">
<Button
android:id="@+id/img_activity"
android:layout_width="58dp"
android:layout_height="58dp"
android:layout_gravity="center"
android:background="@drawable/btn_plus"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
android:layout_marginBottom="15dp"
tools:layout_editor_absoluteY="92dp" />
</androidx.constraintlayout.widget.ConstraintLayout>
</LinearLayout>
</LinearLayout>
</LinearLayout>
<androidx.coordinatorlayout.widget.CoordinatorLayout
android:id="@+id/coordinatorLayout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toStartOf="parent"
app:layout_constraintStart_toStartOf="parent">
<com.google.android.material.bottomappbar.BottomAppBar
android:id="@+id/bottom_app_bar"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="bottom"
app:backgroundTint="#303F9F"
app:fabAlignmentMode="center" />
</androidx.coordinatorlayout.widget.CoordinatorLayout>
</androidx.constraintlayout.widget.ConstraintLayout>
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".RecordActivity">
<TextureView
android:id="@+id/textureView1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_centerHorizontal="true" />
</RelativeLayout>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="300dp"
android:layout_height="230dp">
<TextView
android:layout_width="match_parent"
android:layout_height="64dp"
android:layout_gravity="center"
android:background="#193B81"
android:textColor="#FFFFFF"
android:fontFamily="@res/font/nanumsquarer"
android:text="이미지 업로드"
android:gravity="center"
android:textSize="20dp"
/>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:paddingTop="20dp"
android:paddingRight="23dp"
android:paddingLeft="23dp">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:padding="5dp">
<ImageView
android:id="@+id/img_source"
android:layout_width="150dp"
android:layout_height="150dp"/>
<LinearLayout
android:layout_marginLeft="3dp"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="60dp"
android:orientation="horizontal"
android:layout_marginStart="3dp">
<TextView
android:id="@+id/list_file_name"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:gravity="center"
android:text="권주희"
android:textColor="#000000"
android:textSize="20dp"
/>
</LinearLayout>
</LinearLayout>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#6200EE</color>
<color name="colorPrimaryDark">#3700B3</color>
<color name="colorAccent">#03DAC5</color>
</resources>
<resources>
<string name="app_name">CCTedV</string>
</resources>
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>
package com.example.cctedv;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() {
assertEquals(4, 2 + 2);
}
}
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.6.3'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
#Thu Apr 23 16:51:13 KST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
ECLIPSE ANDROID PROJECT IMPORT SUMMARY
======================================
Ignored Files:
--------------
The following files were *not* copied into the new Gradle project; you
should evaluate whether these are still needed in your project and if
so manually move them:
* .gitignore
* LICENSE
* README.md
* build.xml
* doc/
* doc/allclasses-frame.html
* doc/allclasses-noframe.html
* doc/constant-values.html
* doc/deprecated-list.html
* doc/help-doc.html
* doc/index-all.html
* doc/index.html
* doc/net/
* doc/net/majorkernelpanic/
* doc/net/majorkernelpanic/streaming/
* doc/net/majorkernelpanic/streaming/MediaStream.html
* doc/net/majorkernelpanic/streaming/Session.Callback.html
* doc/net/majorkernelpanic/streaming/Session.html
* doc/net/majorkernelpanic/streaming/SessionBuilder.html
* doc/net/majorkernelpanic/streaming/Stream.html
* doc/net/majorkernelpanic/streaming/audio/
* doc/net/majorkernelpanic/streaming/audio/AACStream.html
* doc/net/majorkernelpanic/streaming/audio/AMRNBStream.html
* doc/net/majorkernelpanic/streaming/audio/AudioQuality.html
* doc/net/majorkernelpanic/streaming/audio/AudioStream.html
* doc/net/majorkernelpanic/streaming/audio/package-frame.html
* doc/net/majorkernelpanic/streaming/audio/package-summary.html
* doc/net/majorkernelpanic/streaming/audio/package-tree.html
* doc/net/majorkernelpanic/streaming/exceptions/
* doc/net/majorkernelpanic/streaming/exceptions/CameraInUseException.html
* doc/net/majorkernelpanic/streaming/exceptions/ConfNotSupportedException.html
* doc/net/majorkernelpanic/streaming/exceptions/InvalidSurfaceException.html
* doc/net/majorkernelpanic/streaming/exceptions/StorageUnavailableException.html
* doc/net/majorkernelpanic/streaming/exceptions/package-frame.html
* doc/net/majorkernelpanic/streaming/exceptions/package-summary.html
* doc/net/majorkernelpanic/streaming/exceptions/package-tree.html
* doc/net/majorkernelpanic/streaming/gl/
* doc/net/majorkernelpanic/streaming/gl/SurfaceManager.html
* doc/net/majorkernelpanic/streaming/gl/SurfaceView.ViewAspectRatioMeasurer.html
* doc/net/majorkernelpanic/streaming/gl/SurfaceView.html
* doc/net/majorkernelpanic/streaming/gl/TextureManager.html
* doc/net/majorkernelpanic/streaming/gl/package-frame.html
* doc/net/majorkernelpanic/streaming/gl/package-summary.html
* doc/net/majorkernelpanic/streaming/gl/package-tree.html
* doc/net/majorkernelpanic/streaming/hw/
* doc/net/majorkernelpanic/streaming/hw/CodecManager.html
* doc/net/majorkernelpanic/streaming/hw/EncoderDebugger.html
* doc/net/majorkernelpanic/streaming/hw/NV21Convertor.html
* doc/net/majorkernelpanic/streaming/hw/package-frame.html
* doc/net/majorkernelpanic/streaming/hw/package-summary.html
* doc/net/majorkernelpanic/streaming/hw/package-tree.html
* doc/net/majorkernelpanic/streaming/mp4/
* doc/net/majorkernelpanic/streaming/mp4/MP4Config.html
* doc/net/majorkernelpanic/streaming/mp4/MP4Parser.html
* doc/net/majorkernelpanic/streaming/mp4/package-frame.html
* doc/net/majorkernelpanic/streaming/mp4/package-summary.html
* doc/net/majorkernelpanic/streaming/mp4/package-tree.html
* doc/net/majorkernelpanic/streaming/package-frame.html
* doc/net/majorkernelpanic/streaming/package-summary.html
* doc/net/majorkernelpanic/streaming/package-tree.html
* doc/net/majorkernelpanic/streaming/rtcp/
* doc/net/majorkernelpanic/streaming/rtcp/SenderReport.html
* doc/net/majorkernelpanic/streaming/rtcp/package-frame.html
* doc/net/majorkernelpanic/streaming/rtcp/package-summary.html
* doc/net/majorkernelpanic/streaming/rtcp/package-tree.html
* doc/net/majorkernelpanic/streaming/rtp/
* doc/net/majorkernelpanic/streaming/rtp/AACADTSPacketizer.html
* doc/net/majorkernelpanic/streaming/rtp/AACLATMPacketizer.html
* doc/net/majorkernelpanic/streaming/rtp/AMRNBPacketizer.html
* doc/net/majorkernelpanic/streaming/rtp/AbstractPacketizer.html
* doc/net/majorkernelpanic/streaming/rtp/H263Packetizer.html
* doc/net/majorkernelpanic/streaming/rtp/H264Packetizer.html
* doc/net/majorkernelpanic/streaming/rtp/MediaCodecInputStream.html
* doc/net/majorkernelpanic/streaming/rtp/RtpSocket.html
* doc/net/majorkernelpanic/streaming/rtp/package-frame.html
* doc/net/majorkernelpanic/streaming/rtp/package-summary.html
* doc/net/majorkernelpanic/streaming/rtp/package-tree.html
* doc/net/majorkernelpanic/streaming/rtsp/
* doc/net/majorkernelpanic/streaming/rtsp/RtspClient.Callback.html
* doc/net/majorkernelpanic/streaming/rtsp/RtspClient.html
* doc/net/majorkernelpanic/streaming/rtsp/RtspServer.CallbackListener.html
* doc/net/majorkernelpanic/streaming/rtsp/RtspServer.LocalBinder.html
* doc/net/majorkernelpanic/streaming/rtsp/RtspServer.html
* doc/net/majorkernelpanic/streaming/rtsp/UriParser.html
* doc/net/majorkernelpanic/streaming/rtsp/package-frame.html
* doc/net/majorkernelpanic/streaming/rtsp/package-summary.html
* doc/net/majorkernelpanic/streaming/rtsp/package-tree.html
* doc/net/majorkernelpanic/streaming/video/
* doc/net/majorkernelpanic/streaming/video/CodecManager.html
* doc/net/majorkernelpanic/streaming/video/H263Stream.html
* doc/net/majorkernelpanic/streaming/video/H264Stream.html
* doc/net/majorkernelpanic/streaming/video/VideoQuality.html
* doc/net/majorkernelpanic/streaming/video/VideoStream.html
* doc/net/majorkernelpanic/streaming/video/package-frame.html
* doc/net/majorkernelpanic/streaming/video/package-summary.html
* doc/net/majorkernelpanic/streaming/video/package-tree.html
* doc/overview-frame.html
* doc/overview-summary.html
* doc/overview-tree.html
* doc/package-list
* doc/resources/
* doc/resources/background.gif
* doc/resources/tab.gif
* doc/resources/titlebar.gif
* doc/resources/titlebar_end.gif
* doc/serialized-form.html
* doc/stylesheet.css
* pom.xml
* proguard-project.txt
Moved Files:
------------
Android Gradle projects use a different directory structure than ADT
Eclipse projects. Here's how the projects were restructured:
* AndroidManifest.xml => libstreaming/src/main/AndroidManifest.xml
* res/ => libstreaming/src/main/res/
* src/ => libstreaming/src/main/java/
Next Steps:
-----------
You can now build the project. The Gradle project needs network
connectivity to download dependencies.
Bugs:
-----
If for some reason your project does not build, and you determine that
it is due to a bug or limitation of the Eclipse to Gradle importer,
please file a bug at http://b.android.com with category
Component-Tools.
(This import summary is for your information only, and can be deleted
after import once you are satisfied with the results.)
apply plugin: 'com.android.library'
android {
compileSdkVersion 28
buildToolsVersion "28.0.3"
defaultConfig {
minSdkVersion 22
targetSdkVersion 28
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
}
}
/**
* Automatically generated file. DO NOT MODIFY
*/
package net.majorkernelpanic.streaming;
public final class BuildConfig {
public static final boolean DEBUG = Boolean.parseBoolean("true");
public static final String LIBRARY_PACKAGE_NAME = "net.majorkernelpanic.streaming";
/**
* @deprecated APPLICATION_ID is misleading in libraries. For the library package name use LIBRARY_PACKAGE_NAME
*/
@Deprecated
public static final String APPLICATION_ID = "net.majorkernelpanic.streaming";
public static final String BUILD_TYPE = "debug";
public static final String FLAVOR = "";
public static final int VERSION_CODE = 40;
public static final String VERSION_NAME = "4.0";
}
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="net.majorkernelpanic.streaming"
android:versionCode="40"
android:versionName="4.0" >
<uses-sdk
android:minSdkVersion="22"
android:targetSdkVersion="28" />
</manifest>
[{"outputType":{"type":"AAPT_FRIENDLY_MERGED_MANIFESTS"},"apkData":{"type":"MAIN","splits":[],"versionCode":40,"versionName":"4.0","enabled":true,"outputFile":"libstreaming-debug.aar","fullName":"debug","baseName":"debug","dirName":""},"path":"AndroidManifest.xml","properties":{"packageId":"net.majorkernelpanic.streaming","split":""}}]
<?xml version="1.0" encoding="utf-8"?>
<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/jniLibs"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/jniLibs"/></dataSet></merger>
<?xml version="1.0" encoding="utf-8"?>
<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/shaders"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/shaders"/></dataSet></merger>
<?xml version="1.0" encoding="utf-8"?>
<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/assets"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/intermediates/shader_assets/debug/out"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/assets"/></dataSet></merger>
<?xml version="1.0" encoding="utf-8"?>
<merger version="3"><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="main$Generated" generated="true" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/res"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/generated/res/rs/debug"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/generated/res/resValues/debug"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="main" generated-set="main$Generated" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/res"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/generated/res/rs/debug"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/generated/res/resValues/debug"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="debug$Generated" generated="true" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/res"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="debug" generated-set="debug$Generated" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/res"/></dataSet><mergedItems/></merger>
1<?xml version="1.0" encoding="utf-8"?>
2<manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 package="net.majorkernelpanic.streaming"
4 android:versionCode="40"
5 android:versionName="4.0" >
6
7 <uses-sdk
7-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
8 android:minSdkVersion="22"
8-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
9 android:targetSdkVersion="28" />
9-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:7:9-38
10
11</manifest>
[{"outputType":{"type":"MERGED_MANIFESTS"},"apkData":{"type":"MAIN","splits":[],"versionCode":40,"versionName":"4.0","enabled":true,"outputFile":"libstreaming-debug.aar","fullName":"debug","baseName":"debug","dirName":""},"path":"../../library_manifest/debug/AndroidManifest.xml","properties":{"packageId":"net.majorkernelpanic.streaming","split":""}}]
-- Merging decision tree log ---
manifest
ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
package
ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:2:5-45
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
android:versionName
ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:4:5-30
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
xmlns:android
ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:11-69
android:versionCode
ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:3:5-29
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
uses-sdk
ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
android:targetSdkVersion
ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:7:9-38
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
android:minSdkVersion
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="net.majorkernelpanic.streaming"
android:versionCode="40"
android:versionName="4.0" >
<uses-sdk
android:targetSdkVersion="19" />
</manifest>
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.util.Random;
import net.majorkernelpanic.streaming.audio.AudioStream;
import net.majorkernelpanic.streaming.rtp.AbstractPacketizer;
import net.majorkernelpanic.streaming.video.VideoStream;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaRecorder;
import android.net.LocalServerSocket;
import android.net.LocalSocket;
import android.net.LocalSocketAddress;
import android.os.Build;
import android.os.ParcelFileDescriptor;
import android.util.Log;
/**
* A MediaRecorder that streams what it records using a packetizer from the RTP package.
* You can't use this class directly!
*/
public abstract class MediaStream implements Stream {
protected static final String TAG = "MediaStream";
/** Raw audio/video will be encoded using the MediaRecorder API. */
public static final byte MODE_MEDIARECORDER_API = 0x01;
/** Raw audio/video will be encoded using the MediaCodec API with buffers. */
public static final byte MODE_MEDIACODEC_API = 0x02;
/** Raw audio/video will be encoded using the MediaCodec API with a surface. */
public static final byte MODE_MEDIACODEC_API_2 = 0x05;
/** A LocalSocket will be used to feed the MediaRecorder object */
public static final byte PIPE_API_LS = 0x01;
/** A ParcelFileDescriptor will be used to feed the MediaRecorder object */
public static final byte PIPE_API_PFD = 0x02;
/** Prefix that will be used for all shared preferences saved by libstreaming */
protected static final String PREF_PREFIX = "libstreaming-";
/** The packetizer that will read the output of the camera and send RTP packets over the network. */
protected AbstractPacketizer mPacketizer = null;
protected static byte sSuggestedMode = MODE_MEDIARECORDER_API;
protected byte mMode, mRequestedMode;
/**
* Starting with Lollipop, the LocalSocket API cannot be used to feed a MediaRecorder object.
* You can force which API is used to create the pipe that feeds it
* by using {@link #PIPE_API_LS} and {@link #PIPE_API_PFD}.
*/
protected final static byte sPipeApi;
protected boolean mStreaming = false, mConfigured = false;
protected int mRtpPort = 0, mRtcpPort = 0;
protected byte mChannelIdentifier = 0;
protected OutputStream mOutputStream = null;
protected InetAddress mDestination;
protected ParcelFileDescriptor[] mParcelFileDescriptors;
protected ParcelFileDescriptor mParcelRead;
protected ParcelFileDescriptor mParcelWrite;
protected LocalSocket mReceiver, mSender = null;
private LocalServerSocket mLss = null;
private int mSocketId;
private int mTTL = 64;
protected MediaRecorder mMediaRecorder;
protected MediaCodec mMediaCodec;
static {
// We determine whether or not the MediaCodec API should be used
try {
Class.forName("android.media.MediaCodec");
// Will be set to MODE_MEDIACODEC_API at some point...
sSuggestedMode = MODE_MEDIACODEC_API;
Log.i(TAG,"Phone supports the MediaCoded API");
} catch (ClassNotFoundException e) {
sSuggestedMode = MODE_MEDIARECORDER_API;
Log.i(TAG,"Phone does not support the MediaCodec API");
}
// Starting with Lollipop, the LocalSocket API can no longer be used to feed
// a MediaRecorder object, for security reasons
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.KITKAT_WATCH) {
sPipeApi = PIPE_API_PFD;
} else {
sPipeApi = PIPE_API_LS;
}
}
public MediaStream() {
mRequestedMode = sSuggestedMode;
mMode = sSuggestedMode;
}
/**
* Sets the destination IP address of the stream.
* @param dest The destination address of the stream
*/
public void setDestinationAddress(InetAddress dest) {
mDestination = dest;
}
/**
* Sets the destination ports of the stream.
* If an odd number is supplied for the destination port, the next
* lower even number will be used for RTP and the odd number itself for RTCP.
* If an even number is supplied, it will be used for RTP and the next odd
* number will be used for RTCP.
* @param dport The destination port
*/
public void setDestinationPorts(int dport) {
if (dport % 2 == 1) {
mRtpPort = dport-1;
mRtcpPort = dport;
} else {
mRtpPort = dport;
mRtcpPort = dport+1;
}
}
/**
* Sets the destination ports of the stream.
* @param rtpPort Destination port that will be used for RTP
* @param rtcpPort Destination port that will be used for RTCP
*/
public void setDestinationPorts(int rtpPort, int rtcpPort) {
mRtpPort = rtpPort;
mRtcpPort = rtcpPort;
mOutputStream = null;
}
/**
* If TCP is used as the transport protocol for the RTP session,
* the output stream to which RTP packets will be written
* must be specified with this method.
*/
public void setOutputStream(OutputStream stream, byte channelIdentifier) {
mOutputStream = stream;
mChannelIdentifier = channelIdentifier;
}
/**
* Sets the Time To Live of packets sent over the network.
* @param ttl The time to live
* @throws IOException
*/
public void setTimeToLive(int ttl) throws IOException {
mTTL = ttl;
}
/**
* Returns a pair of destination ports, the first one is the
* one used for RTP and the second one is used for RTCP.
**/
public int[] getDestinationPorts() {
return new int[] {
mRtpPort,
mRtcpPort
};
}
/**
* Returns a pair of source ports, the first one is the
* one used for RTP and the second one is used for RTCP.
**/
public int[] getLocalPorts() {
return mPacketizer.getRtpSocket().getLocalPorts();
}
/**
* Sets the streaming method that will be used.
*
* If the mode is set to {@link #MODE_MEDIARECORDER_API}, raw audio/video will be encoded
* using the MediaRecorder API. <br />
*
* If the mode is set to {@link #MODE_MEDIACODEC_API} or to {@link #MODE_MEDIACODEC_API_2},
* audio/video will be encoded with using the MediaCodec. <br />
*
* The {@link #MODE_MEDIACODEC_API_2} mode only concerns {@link VideoStream}, it makes
* use of the createInputSurface() method of the MediaCodec API (Android 4.3 is needed there). <br />
*
* @param mode Can be {@link #MODE_MEDIARECORDER_API}, {@link #MODE_MEDIACODEC_API} or {@link #MODE_MEDIACODEC_API_2}
*/
public void setStreamingMethod(byte mode) {
mRequestedMode = mode;
}
/**
* Returns the streaming method in use, call this after
* {@link #configure()} to get an accurate response.
*/
public byte getStreamingMethod() {
return mMode;
}
/**
* Returns the packetizer associated with the {@link MediaStream}.
* @return The packetizer
*/
public AbstractPacketizer getPacketizer() {
return mPacketizer;
}
/**
* Returns an approximation of the bit rate consumed by the stream in bits per second.
*/
public long getBitrate() {
return !mStreaming ? 0 : mPacketizer.getRtpSocket().getBitrate();
}
/**
* Indicates if the {@link MediaStream} is streaming.
* @return A boolean indicating if the {@link MediaStream} is streaming
*/
public boolean isStreaming() {
return mStreaming;
}
/**
* Configures the stream with the settings supplied with
* {@link VideoStream#setVideoQuality(net.majorkernelpanic.streaming.video.VideoQuality)}
* for a {@link VideoStream} and {@link AudioStream#setAudioQuality(net.majorkernelpanic.streaming.audio.AudioQuality)}
* for a {@link AudioStream}.
*/
public synchronized void configure() throws IllegalStateException, IOException {
if (mStreaming) throw new IllegalStateException("Can't be called while streaming.");
if (mPacketizer != null) {
mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
mPacketizer.getRtpSocket().setOutputStream(mOutputStream, mChannelIdentifier);
}
mMode = mRequestedMode;
mConfigured = true;
}
/** Starts the stream. */
public synchronized void start() throws IllegalStateException, IOException {
if (mDestination==null)
throw new IllegalStateException("No destination ip address set for the stream !");
if (mRtpPort<=0 || mRtcpPort<=0)
throw new IllegalStateException("No destination ports set for the stream !");
mPacketizer.setTimeToLive(mTTL);
if (mMode != MODE_MEDIARECORDER_API) {
encodeWithMediaCodec();
} else {
encodeWithMediaRecorder();
}
}
/** Stops the stream. */
@SuppressLint("NewApi")
public synchronized void stop() {
if (mStreaming) {
try {
if (mMode==MODE_MEDIARECORDER_API) {
mMediaRecorder.stop();
mMediaRecorder.release();
mMediaRecorder = null;
closeSockets();
mPacketizer.stop();
} else {
mPacketizer.stop();
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
}
} catch (Exception e) {
e.printStackTrace();
}
mStreaming = false;
}
}
protected abstract void encodeWithMediaRecorder() throws IOException;
protected abstract void encodeWithMediaCodec() throws IOException;
/**
* Returns a description of the stream using SDP.
* This method can only be called after {@link Stream#configure()}.
* @throws IllegalStateException Thrown when {@link Stream#configure()} was not called.
*/
public abstract String getSessionDescription();
/**
* Returns the SSRC of the underlying {@link net.majorkernelpanic.streaming.rtp.RtpSocket}.
* @return the SSRC of the stream
*/
public int getSSRC() {
return getPacketizer().getSSRC();
}
protected void createSockets() throws IOException {
if (sPipeApi == PIPE_API_LS) {
final String LOCAL_ADDR = "net.majorkernelpanic.streaming-";
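// Try up to 10 random socket names in case the chosen LocalServerSocket name is already taken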
for (int i=0;i<10;i++) {
try {
mSocketId = new Random().nextInt();
mLss = new LocalServerSocket(LOCAL_ADDR+mSocketId);
break;
} catch (IOException e1) {}
}
mReceiver = new LocalSocket();
mReceiver.connect( new LocalSocketAddress(LOCAL_ADDR+mSocketId));
mReceiver.setReceiveBufferSize(500000);
mReceiver.setSoTimeout(3000);
mSender = mLss.accept();
mSender.setSendBufferSize(500000);
} else {
Log.e(TAG, "parcelFileDescriptors createPipe version = Lollipop");
mParcelFileDescriptors = ParcelFileDescriptor.createPipe();
mParcelRead = new ParcelFileDescriptor(mParcelFileDescriptors[0]);
mParcelWrite = new ParcelFileDescriptor(mParcelFileDescriptors[1]);
}
}
protected void closeSockets() {
if (sPipeApi == PIPE_API_LS) {
try {
mReceiver.close();
} catch (Exception e) {
e.printStackTrace();
}
try {
mSender.close();
} catch (Exception e) {
e.printStackTrace();
}
try {
mLss.close();
} catch (Exception e) {
e.printStackTrace();
}
mLss = null;
mSender = null;
mReceiver = null;
} else {
try {
if (mParcelRead != null) {
mParcelRead.close();
}
} catch (Exception e) {
e.printStackTrace();
}
try {
if (mParcelWrite != null) {
mParcelWrite.close();
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
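For reference, the odd/even pairing implemented by setDestinationPorts(int) above always resolves to an (even RTP, odd RTCP) pair. A minimal standalone sketch of that rule, with illustrative port values (the demo class below is not part of the library):

public class PortPairingDemo {
    /** Mirrors MediaStream#setDestinationPorts(int): RTP gets the even port, RTCP the next odd one. */
    static int[] pair(int dport) {
        return (dport % 2 == 1)
                ? new int[] { dport - 1, dport }   // odd input: RTP = dport-1, RTCP = dport
                : new int[] { dport, dport + 1 };  // even input: RTP = dport, RTCP = dport+1
    }

    public static void main(String[] args) {
        System.out.println(java.util.Arrays.toString(pair(5006))); // [5006, 5007]
        System.out.println(java.util.Arrays.toString(pair(5007))); // [5006, 5007]
    }
}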
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.CountDownLatch;
import net.majorkernelpanic.streaming.audio.AudioQuality;
import net.majorkernelpanic.streaming.audio.AudioStream;
import net.majorkernelpanic.streaming.exceptions.CameraInUseException;
import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException;
import net.majorkernelpanic.streaming.exceptions.InvalidSurfaceException;
import net.majorkernelpanic.streaming.exceptions.StorageUnavailableException;
import net.majorkernelpanic.streaming.gl.SurfaceView;
import net.majorkernelpanic.streaming.rtsp.RtspClient;
import net.majorkernelpanic.streaming.video.VideoQuality;
import net.majorkernelpanic.streaming.video.VideoStream;
import android.hardware.Camera.CameraInfo;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
/**
 * You should instantiate this class with the {@link SessionBuilder}.<br />
 * This is the class you will want to use to stream audio and/or video to some peer using RTP.<br />
 *
 * It holds a {@link VideoStream} and an {@link AudioStream} together and provides
 * synchronous and asynchronous functions to start and stop those streams.
 * You should implement the callback interface {@link Callback} to receive notifications and error reports.<br />
 *
 * If you want to stream to an RTSP server, you will need an instance of this class and hand it to a {@link RtspClient}.
 *
 * If you don't use the RTSP protocol, you will still need to send a session description to the receiver
 * so that it can decode your audio/video streams. You can obtain this session description by calling
 * {@link #configure()} or {@link #syncConfigure()} to configure the session with its parameters
 * (audio sampling rate, video resolution) and then {@link Session#getSessionDescription()}.<br />
 *
 * See example 2 here: https://github.com/fyhertz/libstreaming-examples to
 * see how to get an SDP.<br />
 *
 * See example 3 here: https://github.com/fyhertz/libstreaming-examples to
 * see how to stream to an RTSP server.<br />
 *
 */
public class Session {
public final static String TAG = "Session";
public final static int STREAM_VIDEO = 0x01;
public final static int STREAM_AUDIO = 0x00;
/** Some app is already using a camera (Camera.open() has failed). */
public final static int ERROR_CAMERA_ALREADY_IN_USE = 0x00;
/** The phone may not support some streaming parameters that you are trying to use (bit rate, frame rate, resolution...). */
public final static int ERROR_CONFIGURATION_NOT_SUPPORTED = 0x01;
/**
* The internal storage of the phone is not ready.
* libstreaming tried to store a test file on the sdcard but couldn't.
* See H264Stream and AACStream to find out why libstreaming would want to do something like that.
*/
public final static int ERROR_STORAGE_NOT_READY = 0x02;
/** The phone has no flash. */
public final static int ERROR_CAMERA_HAS_NO_FLASH = 0x03;
/** The supplied SurfaceView is not a valid surface, or has not been created yet. */
public final static int ERROR_INVALID_SURFACE = 0x04;
/**
* The destination set with {@link Session#setDestination(String)} could not be resolved.
* May mean that the phone has no access to the internet, or that the DNS server could not
* resolve the host name.
*/
public final static int ERROR_UNKNOWN_HOST = 0x05;
/**
* Some other error occurred!
*/
public final static int ERROR_OTHER = 0x06;
private String mOrigin;
private String mDestination;
private int mTimeToLive = 64;
private long mTimestamp;
private AudioStream mAudioStream = null;
private VideoStream mVideoStream = null;
private Callback mCallback;
private Handler mMainHandler;
private Handler mHandler;
/**
* Creates a streaming session that can be customized by adding tracks.
*/
public Session() {
long uptime = System.currentTimeMillis();
HandlerThread thread = new HandlerThread("net.majorkernelpanic.streaming.Session");
thread.start();
mHandler = new Handler(thread.getLooper());
mMainHandler = new Handler(Looper.getMainLooper());
mTimestamp = (uptime/1000)<<32 & (((uptime-((uptime/1000)*1000))>>32)/1000); // NTP timestamp
mOrigin = "127.0.0.1";
}
/**
* The callback interface you need to implement to get some feedback.
* Those callbacks will be made from the UI thread.
*/
public interface Callback {
/**
* Called periodically to inform you on the bandwidth
* consumption of the streams when streaming.
*/
public void onBitrateUpdate(long bitrate);
/** Called when some error occurs. */
public void onSessionError(int reason, int streamType, Exception e);
/**
* Called when the preview of the {@link VideoStream}
* has correctly been started.
* If an error occurs while starting the preview,
* {@link Callback#onSessionError(int, int, Exception)} will be
* called instead of {@link Callback#onPreviewStarted()}.
*/
public void onPreviewStarted();
/**
* Called when the session has correctly been configured
* after calling {@link Session#configure()}.
* If an error occurs while configuring the {@link Session},
* {@link Callback#onSessionError(int, int, Exception)} will be
* called instead of {@link Callback#onSessionConfigured()}.
*/
public void onSessionConfigured();
/**
* Called when the streams of the session have correctly been started.
* If an error occurs while starting the {@link Session},
* {@link Callback#onSessionError(int, int, Exception)} will be
* called instead of {@link Callback#onSessionStarted()}.
*/
public void onSessionStarted();
/** Called when the streams of the session have been stopped. */
public void onSessionStopped();
}
/** You probably don't need to use that directly, use the {@link SessionBuilder}. */
void addAudioTrack(AudioStream track) {
removeAudioTrack();
mAudioStream = track;
}
/** You probably don't need to use that directly, use the {@link SessionBuilder}. */
void addVideoTrack(VideoStream track) {
removeVideoTrack();
mVideoStream = track;
}
/** You probably don't need to use that directly, use the {@link SessionBuilder}. */
void removeAudioTrack() {
if (mAudioStream != null) {
mAudioStream.stop();
mAudioStream = null;
}
}
/** You probably don't need to use that directly, use the {@link SessionBuilder}. */
void removeVideoTrack() {
if (mVideoStream != null) {
mVideoStream.stopPreview();
mVideoStream = null;
}
}
/** Returns the underlying {@link AudioStream} used by the {@link Session}. */
public AudioStream getAudioTrack() {
return mAudioStream;
}
/** Returns the underlying {@link VideoStream} used by the {@link Session}. */
public VideoStream getVideoTrack() {
return mVideoStream;
}
/**
* Sets the callback interface that will be called by the {@link Session}.
* @param callback The implementation of the {@link Callback} interface
*/
public void setCallback(Callback callback) {
mCallback = callback;
}
/**
* The origin address of the session.
* It appears in the session description.
* @param origin The origin address
*/
public void setOrigin(String origin) {
mOrigin = origin;
}
/**
* The destination address for all the streams of the session. <br />
* Changes will be taken into account the next time you start the session.
* @param destination The destination address
*/
public void setDestination(String destination) {
mDestination = destination;
}
/**
* Set the TTL of all packets sent during the session. <br />
* Changes will be taken into account the next time you start the session.
* @param ttl The Time To Live
*/
public void setTimeToLive(int ttl) {
mTimeToLive = ttl;
}
/**
* Sets the configuration of the stream. <br />
* You can call this method at any time and changes will take
* effect next time you call {@link #configure()}.
* @param quality Quality of the stream
*/
public void setVideoQuality(VideoQuality quality) {
if (mVideoStream != null) {
mVideoStream.setVideoQuality(quality);
}
}
/**
* Sets a Surface to show a preview of recorded media (video). <br />
* You can call this method at any time and changes will take
* effect next time you call {@link #start()} or {@link #startPreview()}.
*/
public void setSurfaceView(final SurfaceView view) {
mHandler.post(new Runnable() {
@Override
public void run() {
if (mVideoStream != null) {
mVideoStream.setSurfaceView(view);
}
}
});
}
/**
* Sets the orientation of the preview. <br />
* You can call this method at any time and changes will take
* effect next time you call {@link #configure()}.
* @param orientation The orientation of the preview
*/
public void setPreviewOrientation(int orientation) {
if (mVideoStream != null) {
mVideoStream.setPreviewOrientation(orientation);
}
}
/**
* Sets the configuration of the stream. <br />
* You can call this method at any time and changes will take
* effect next time you call {@link #configure()}.
* @param quality Quality of the stream
*/
public void setAudioQuality(AudioQuality quality) {
if (mAudioStream != null) {
mAudioStream.setAudioQuality(quality);
}
}
/**
* Returns the {@link Callback} interface that was set with
* {@link #setCallback(Callback)} or null if none was set.
*/
public Callback getCallback() {
return mCallback;
}
/**
* Returns a Session Description that can be stored in a file or sent to a client with RTSP.
* @return The Session Description.
* @throws IllegalStateException Thrown when {@link #setDestination(String)} has never been called.
*/
public String getSessionDescription() {
StringBuilder sessionDescription = new StringBuilder();
if (mDestination==null) {
throw new IllegalStateException("setDestination() has not been called !");
}
sessionDescription.append("v=0\r\n");
// TODO: Add IPV6 support
sessionDescription.append("o=- "+mTimestamp+" "+mTimestamp+" IN IP4 "+mOrigin+"\r\n");
sessionDescription.append("s=Unnamed\r\n");
sessionDescription.append("i=N/A\r\n");
sessionDescription.append("c=IN IP4 "+mDestination+"\r\n");
// t=0 0 means the session is permanent (we don't know when it will stop)
sessionDescription.append("t=0 0\r\n");
sessionDescription.append("a=recvonly\r\n");
// Prevents two different sessions from using the same peripheral at the same time
if (mAudioStream != null) {
sessionDescription.append(mAudioStream.getSessionDescription());
sessionDescription.append("a=control:trackID="+0+"\r\n");
}
if (mVideoStream != null) {
sessionDescription.append(mVideoStream.getSessionDescription());
sessionDescription.append("a=control:trackID="+1+"\r\n");
}
return sessionDescription.toString();
}
/** Returns the destination set with {@link #setDestination(String)}. */
public String getDestination() {
return mDestination;
}
/** Returns an approximation of the bandwidth consumed by the session in bits per second. */
public long getBitrate() {
long sum = 0;
if (mAudioStream != null) sum += mAudioStream.getBitrate();
if (mVideoStream != null) sum += mVideoStream.getBitrate();
return sum;
}
/** Indicates if a track is currently running. */
public boolean isStreaming() {
return (mAudioStream!=null && mAudioStream.isStreaming()) || (mVideoStream!=null && mVideoStream.isStreaming());
}
/**
* Configures all streams of the session.
**/
public void configure() {
mHandler.post(new Runnable() {
@Override
public void run() {
try {
syncConfigure();
} catch (Exception e) {}
}
});
}
/**
* Does the same thing as {@link #configure()}, but in a synchronous manner. <br />
* Throws exceptions in addition to calling a callback
* {@link Callback#onSessionError(int, int, Exception)} when
* an error occurs.
**/
public void syncConfigure()
throws CameraInUseException,
StorageUnavailableException,
ConfNotSupportedException,
InvalidSurfaceException,
RuntimeException,
IOException {
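// id 0 refers to the audio stream, id 1 to the video stream (see getTrack(int))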
for (int id=0;id<2;id++) {
Stream stream = id==0 ? mAudioStream : mVideoStream;
if (stream!=null && !stream.isStreaming()) {
try {
stream.configure();
} catch (CameraInUseException e) {
postError(ERROR_CAMERA_ALREADY_IN_USE , id, e);
throw e;
} catch (StorageUnavailableException e) {
postError(ERROR_STORAGE_NOT_READY , id, e);
throw e;
} catch (ConfNotSupportedException e) {
postError(ERROR_CONFIGURATION_NOT_SUPPORTED , id, e);
throw e;
} catch (InvalidSurfaceException e) {
postError(ERROR_INVALID_SURFACE , id, e);
throw e;
} catch (IOException e) {
postError(ERROR_OTHER, id, e);
throw e;
} catch (RuntimeException e) {
postError(ERROR_OTHER, id, e);
throw e;
}
}
}
postSessionConfigured();
}
/**
* Asynchronously starts all streams of the session.
**/
public void start() {
mHandler.post(new Runnable() {
@Override
public void run() {
try {
syncStart();
} catch (Exception e) {}
}
});
}
/**
* Starts a stream in a synchronous manner. <br />
* Throws exceptions in addition to calling a callback.
* @param id The id of the stream to start
**/
public void syncStart(int id)
throws CameraInUseException,
StorageUnavailableException,
ConfNotSupportedException,
InvalidSurfaceException,
UnknownHostException,
IOException {
Stream stream = id==0 ? mAudioStream : mVideoStream;
if (stream!=null && !stream.isStreaming()) {
try {
InetAddress destination = InetAddress.getByName(mDestination);
stream.setTimeToLive(mTimeToLive);
stream.setDestinationAddress(destination);
stream.start();
if (getTrack(1-id) == null || getTrack(1-id).isStreaming()) {
postSessionStarted();
}
if (getTrack(1-id) == null || !getTrack(1-id).isStreaming()) {
mHandler.post(mUpdateBitrate);
}
} catch (UnknownHostException e) {
postError(ERROR_UNKNOWN_HOST, id, e);
throw e;
} catch (CameraInUseException e) {
postError(ERROR_CAMERA_ALREADY_IN_USE , id, e);
throw e;
} catch (StorageUnavailableException e) {
postError(ERROR_STORAGE_NOT_READY , id, e);
throw e;
} catch (ConfNotSupportedException e) {
postError(ERROR_CONFIGURATION_NOT_SUPPORTED , id, e);
throw e;
} catch (InvalidSurfaceException e) {
postError(ERROR_INVALID_SURFACE , id, e);
throw e;
} catch (IOException e) {
postError(ERROR_OTHER, id, e);
throw e;
} catch (RuntimeException e) {
postError(ERROR_OTHER, id, e);
throw e;
}
}
}
/**
* Does the same thing as {@link #start()}, but in a synchronous manner. <br />
* Throws exceptions in addition to calling a callback.
**/
public void syncStart()
throws CameraInUseException,
StorageUnavailableException,
ConfNotSupportedException,
InvalidSurfaceException,
UnknownHostException,
IOException {
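// Start the video stream (id 1) first, then the audio stream (id 0);
// if audio fails to start, the video stream is stopped again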
syncStart(1);
try {
syncStart(0);
} catch (RuntimeException e) {
syncStop(1);
throw e;
} catch (IOException e) {
syncStop(1);
throw e;
}
}
/** Stops all existing streams. */
public void stop() {
mHandler.post(new Runnable() {
@Override
public void run() {
syncStop();
}
});
}
/**
* Stops one stream in a synchronous manner.
* @param id The id of the stream to stop
**/
private void syncStop(final int id) {
Stream stream = id==0 ? mAudioStream : mVideoStream;
if (stream!=null) {
stream.stop();
}
}
/** Stops all existing streams in a synchronous manner. */
public void syncStop() {
syncStop(0);
syncStop(1);
postSessionStopped();
}
/**
* Asynchronously starts the camera preview. <br />
* You should of course pass a {@link SurfaceView} to {@link #setSurfaceView(SurfaceView)}
* before calling this method. Otherwise, the {@link Callback#onSessionError(int, int, Exception)}
* callback will be called with {@link #ERROR_INVALID_SURFACE}.
*/
public void startPreview() {
mHandler.post(new Runnable() {
@Override
public void run() {
if (mVideoStream != null) {
try {
mVideoStream.startPreview();
postPreviewStarted();
mVideoStream.configure();
} catch (CameraInUseException e) {
postError(ERROR_CAMERA_ALREADY_IN_USE , STREAM_VIDEO, e);
} catch (ConfNotSupportedException e) {
postError(ERROR_CONFIGURATION_NOT_SUPPORTED , STREAM_VIDEO, e);
} catch (InvalidSurfaceException e) {
postError(ERROR_INVALID_SURFACE , STREAM_VIDEO, e);
} catch (RuntimeException e) {
postError(ERROR_OTHER, STREAM_VIDEO, e);
} catch (StorageUnavailableException e) {
postError(ERROR_STORAGE_NOT_READY, STREAM_VIDEO, e);
} catch (IOException e) {
postError(ERROR_OTHER, STREAM_VIDEO, e);
}
}
}
});
}
/**
* Asynchronously stops the camera preview.
*/
public void stopPreview() {
mHandler.post(new Runnable() {
@Override
public void run() {
if (mVideoStream != null) {
mVideoStream.stopPreview();
}
}
});
}
/** Switch between the front facing and the back facing camera of the phone. <br />
* If {@link #startPreview()} has been called, the preview will be briefly interrupted. <br />
* If {@link #start()} has been called, the stream will be briefly interrupted.<br />
* To find out which camera is currently selected, use {@link #getCamera()}
**/
public void switchCamera() {
mHandler.post(new Runnable() {
@Override
public void run() {
if (mVideoStream != null) {
try {
mVideoStream.switchCamera();
postPreviewStarted();
} catch (CameraInUseException e) {
postError(ERROR_CAMERA_ALREADY_IN_USE , STREAM_VIDEO, e);
} catch (ConfNotSupportedException e) {
postError(ERROR_CONFIGURATION_NOT_SUPPORTED , STREAM_VIDEO, e);
} catch (InvalidSurfaceException e) {
postError(ERROR_INVALID_SURFACE , STREAM_VIDEO, e);
} catch (IOException e) {
postError(ERROR_OTHER, STREAM_VIDEO, e);
} catch (RuntimeException e) {
postError(ERROR_OTHER, STREAM_VIDEO, e);
}
}
}
});
}
/**
* Returns the id of the camera currently selected. <br />
* It can be either {@link CameraInfo#CAMERA_FACING_BACK} or
* {@link CameraInfo#CAMERA_FACING_FRONT}.
*/
public int getCamera() {
return mVideoStream != null ? mVideoStream.getCamera() : 0;
}
/**
* Toggles the LED of the phone if it has one.
* You can get the current state of the flash with
* {@link Session#getVideoTrack()} and {@link VideoStream#getFlashState()}.
**/
public void toggleFlash() {
mHandler.post(new Runnable() {
@Override
public void run() {
if (mVideoStream != null) {
try {
mVideoStream.toggleFlash();
} catch (RuntimeException e) {
postError(ERROR_CAMERA_HAS_NO_FLASH, STREAM_VIDEO, e);
}
}
}
});
}
/** Deletes all existing tracks & releases associated resources. */
public void release() {
removeAudioTrack();
removeVideoTrack();
mHandler.getLooper().quit();
}
private void postPreviewStarted() {
mMainHandler.post(new Runnable() {
@Override
public void run() {
if (mCallback != null) {
mCallback.onPreviewStarted();
}
}
});
}
private void postSessionConfigured() {
mMainHandler.post(new Runnable() {
@Override
public void run() {
if (mCallback != null) {
mCallback.onSessionConfigured();
}
}
});
}
private void postSessionStarted() {
mMainHandler.post(new Runnable() {
@Override
public void run() {
if (mCallback != null) {
mCallback.onSessionStarted();
}
}
});
}
private void postSessionStopped() {
mMainHandler.post(new Runnable() {
@Override
public void run() {
if (mCallback != null) {
mCallback.onSessionStopped();
}
}
});
}
private void postError(final int reason, final int streamType,final Exception e) {
mMainHandler.post(new Runnable() {
@Override
public void run() {
if (mCallback != null) {
mCallback.onSessionError(reason, streamType, e);
}
}
});
}
private void postBitRate(final long bitrate) {
mMainHandler.post(new Runnable() {
@Override
public void run() {
if (mCallback != null) {
mCallback.onBitrateUpdate(bitrate);
}
}
});
}
private Runnable mUpdateBitrate = new Runnable() {
@Override
public void run() {
if (isStreaming()) {
postBitRate(getBitrate());
mHandler.postDelayed(mUpdateBitrate, 500);
} else {
postBitRate(0);
}
}
};
public boolean trackExists(int id) {
if (id==0)
return mAudioStream!=null;
else
return mVideoStream!=null;
}
public Stream getTrack(int id) {
if (id==0)
return mAudioStream;
else
return mVideoStream;
}
}
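A minimal usage sketch of the asynchronous lifecycle described above, assuming the Session comes from SessionBuilder (shown next); the receiver address and the SessionSketch class are illustrative, not part of the library:

import net.majorkernelpanic.streaming.Session;
import net.majorkernelpanic.streaming.SessionBuilder;

public class SessionSketch {
    public static void startStreaming() {
        final Session session = SessionBuilder.getInstance().build();
        session.setDestination("192.168.0.10"); // illustrative receiver address
        session.setCallback(new Session.Callback() {
            @Override public void onBitrateUpdate(long bitrate) { /* e.g. refresh a bandwidth label */ }
            @Override public void onSessionError(int reason, int streamType, Exception e) { e.printStackTrace(); }
            @Override public void onPreviewStarted() { }
            @Override public void onSessionConfigured() {
                // Once configuration succeeded, the SDP is available; hand it to the
                // receiver so it can decode the streams, then start them.
                String sdp = session.getSessionDescription();
                session.start();
            }
            @Override public void onSessionStarted() { }
            @Override public void onSessionStopped() { }
        });
        session.configure(); // asynchronous; onSessionConfigured() fires on success
    }
}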
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming;
import java.io.IOException;
import java.net.InetAddress;
import net.majorkernelpanic.streaming.audio.AACStream;
import net.majorkernelpanic.streaming.audio.AMRNBStream;
import net.majorkernelpanic.streaming.audio.AudioQuality;
import net.majorkernelpanic.streaming.audio.AudioStream;
import net.majorkernelpanic.streaming.gl.SurfaceView;
import net.majorkernelpanic.streaming.video.H263Stream;
import net.majorkernelpanic.streaming.video.H264Stream;
import net.majorkernelpanic.streaming.video.VideoQuality;
import net.majorkernelpanic.streaming.video.VideoStream;
import android.content.Context;
import android.hardware.Camera.CameraInfo;
import android.preference.PreferenceManager;
/**
* Call {@link #getInstance()} to get access to the SessionBuilder.
*/
public class SessionBuilder {
public final static String TAG = "SessionBuilder";
/** Can be used with {@link #setVideoEncoder}. */
public final static int VIDEO_NONE = 0;
/** Can be used with {@link #setVideoEncoder}. */
public final static int VIDEO_H264 = 1;
/** Can be used with {@link #setVideoEncoder}. */
public final static int VIDEO_H263 = 2;
/** Can be used with {@link #setAudioEncoder}. */
public final static int AUDIO_NONE = 0;
/** Can be used with {@link #setAudioEncoder}. */
public final static int AUDIO_AMRNB = 3;
/** Can be used with {@link #setAudioEncoder}. */
public final static int AUDIO_AAC = 5;
// Default configuration
private VideoQuality mVideoQuality = VideoQuality.DEFAULT_VIDEO_QUALITY;
private AudioQuality mAudioQuality = AudioQuality.DEFAULT_AUDIO_QUALITY;
private Context mContext;
private int mVideoEncoder = VIDEO_H263;
private int mAudioEncoder = AUDIO_AMRNB;
private int mCamera = CameraInfo.CAMERA_FACING_BACK;
private int mTimeToLive = 64;
private int mOrientation = 0;
private boolean mFlash = false;
private SurfaceView mSurfaceView = null;
private String mOrigin = null;
private String mDestination = null;
private Session.Callback mCallback = null;
// Removes the default public constructor
private SessionBuilder() {}
// The SessionBuilder implements the singleton pattern
private static volatile SessionBuilder sInstance = null;
/**
* Returns a reference to the {@link SessionBuilder}.
* @return The reference to the {@link SessionBuilder}
*/
public final static SessionBuilder getInstance() {
if (sInstance == null) {
synchronized (SessionBuilder.class) {
if (sInstance == null) {
SessionBuilder.sInstance = new SessionBuilder();
}
}
}
return sInstance;
}
/**
* Creates a new {@link Session}.
* @return The new Session
*/
public Session build() {
Session session = new Session();
session.setOrigin(mOrigin);
session.setDestination(mDestination);
session.setTimeToLive(mTimeToLive);
session.setCallback(mCallback);
switch (mAudioEncoder) {
case AUDIO_AAC:
AACStream stream = new AACStream();
session.addAudioTrack(stream);
if (mContext!=null)
stream.setPreferences(PreferenceManager.getDefaultSharedPreferences(mContext));
break;
case AUDIO_AMRNB:
session.addAudioTrack(new AMRNBStream());
break;
}
switch (mVideoEncoder) {
case VIDEO_H263:
session.addVideoTrack(new H263Stream(mCamera));
break;
case VIDEO_H264:
H264Stream stream = new H264Stream(mCamera);
if (mContext!=null)
stream.setPreferences(PreferenceManager.getDefaultSharedPreferences(mContext));
session.addVideoTrack(stream);
break;
}
if (session.getVideoTrack()!=null) {
VideoStream video = session.getVideoTrack();
video.setFlashState(mFlash);
video.setVideoQuality(mVideoQuality);
video.setSurfaceView(mSurfaceView);
video.setPreviewOrientation(mOrientation);
video.setDestinationPorts(5006);
}
if (session.getAudioTrack()!=null) {
AudioStream audio = session.getAudioTrack();
audio.setAudioQuality(mAudioQuality);
audio.setDestinationPorts(5004);
}
return session;
}
/**
* Access to the context is needed for the H264Stream class to store some stuff in the SharedPreferences.
* Note that you should pass the Application context, not the context of an Activity.
**/
public SessionBuilder setContext(Context context) {
mContext = context;
return this;
}
/** Sets the destination of the session. */
public SessionBuilder setDestination(String destination) {
mDestination = destination;
return this;
}
/** Sets the origin of the session. It appears in the SDP of the session. */
public SessionBuilder setOrigin(String origin) {
mOrigin = origin;
return this;
}
/** Sets the video stream quality. */
public SessionBuilder setVideoQuality(VideoQuality quality) {
mVideoQuality = quality.clone();
return this;
}
/** Sets the audio encoder. */
public SessionBuilder setAudioEncoder(int encoder) {
mAudioEncoder = encoder;
return this;
}
/** Sets the audio quality. */
public SessionBuilder setAudioQuality(AudioQuality quality) {
mAudioQuality = quality.clone();
return this;
}
/** Sets the default video encoder. */
public SessionBuilder setVideoEncoder(int encoder) {
mVideoEncoder = encoder;
return this;
}
public SessionBuilder setFlashEnabled(boolean enabled) {
mFlash = enabled;
return this;
}
public SessionBuilder setCamera(int camera) {
mCamera = camera;
return this;
}
public SessionBuilder setTimeToLive(int ttl) {
mTimeToLive = ttl;
return this;
}
/**
* Sets the SurfaceView required to preview the video stream.
**/
public SessionBuilder setSurfaceView(SurfaceView surfaceView) {
mSurfaceView = surfaceView;
return this;
}
/**
* Sets the orientation of the preview.
* @param orientation The orientation of the preview
*/
public SessionBuilder setPreviewOrientation(int orientation) {
mOrientation = orientation;
return this;
}
public SessionBuilder setCallback(Session.Callback callback) {
mCallback = callback;
return this;
}
/** Returns the context set with {@link #setContext(Context)}*/
public Context getContext() {
return mContext;
}
/** Returns the destination ip address set with {@link #setDestination(String)}. */
public String getDestination() {
return mDestination;
}
/** Returns the origin ip address set with {@link #setOrigin(String)}. */
public String getOrigin() {
return mOrigin;
}
/** Returns the audio encoder set with {@link #setAudioEncoder(int)}. */
public int getAudioEncoder() {
return mAudioEncoder;
}
/** Returns the id of the {@link android.hardware.Camera} set with {@link #setCamera(int)}. */
public int getCamera() {
return mCamera;
}
/** Returns the video encoder set with {@link #setVideoEncoder(int)}. */
public int getVideoEncoder() {
return mVideoEncoder;
}
/** Returns the VideoQuality set with {@link #setVideoQuality(VideoQuality)}. */
public VideoQuality getVideoQuality() {
return mVideoQuality;
}
/** Returns the AudioQuality set with {@link #setAudioQuality(AudioQuality)}. */
public AudioQuality getAudioQuality() {
return mAudioQuality;
}
/** Returns the flash state set with {@link #setFlashEnabled(boolean)}. */
public boolean getFlashState() {
return mFlash;
}
/** Returns the SurfaceView set with {@link #setSurfaceView(SurfaceView)}. */
public SurfaceView getSurfaceView() {
return mSurfaceView;
}
/** Returns the time to live set with {@link #setTimeToLive(int)}. */
public int getTimeToLive() {
return mTimeToLive;
}
/** Returns a new {@link SessionBuilder} with the same configuration. */
public SessionBuilder clone() {
return new SessionBuilder()
.setDestination(mDestination)
.setOrigin(mOrigin)
.setSurfaceView(mSurfaceView)
.setPreviewOrientation(mOrientation)
.setVideoQuality(mVideoQuality)
.setVideoEncoder(mVideoEncoder)
.setFlashEnabled(mFlash)
.setCamera(mCamera)
.setTimeToLive(mTimeToLive)
.setAudioEncoder(mAudioEncoder)
.setAudioQuality(mAudioQuality)
.setContext(mContext)
.setCallback(mCallback);
}
}
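A minimal sketch of the chained configuration, in the spirit of the libstreaming-examples linked from the Session Javadoc; the Activity, the mSurfaceView field, the quality values, and the destination are illustrative assumptions, not part of the library:

import android.app.Activity;
import android.os.Bundle;
import net.majorkernelpanic.streaming.Session;
import net.majorkernelpanic.streaming.SessionBuilder;
import net.majorkernelpanic.streaming.gl.SurfaceView;
import net.majorkernelpanic.streaming.video.VideoQuality;

public class BuilderSketchActivity extends Activity {
    private SurfaceView mSurfaceView; // assumed to be bound from the layout

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Session session = SessionBuilder.getInstance()
                .setContext(getApplicationContext()) // Application context, not the Activity
                .setAudioEncoder(SessionBuilder.AUDIO_AAC)
                .setVideoEncoder(SessionBuilder.VIDEO_H264)
                .setVideoQuality(new VideoQuality(320, 240, 20, 500000)) // width, height, fps, bit rate
                .setSurfaceView(mSurfaceView)
                .setDestination("192.168.0.10") // illustrative receiver
                .build();
    }
}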
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
/**
* An interface that represents a Stream.
*/
public interface Stream {
/**
* Configures the stream. You need to call this before calling {@link #getSessionDescription()}
* to apply your configuration of the stream.
*/
public void configure() throws IllegalStateException, IOException;
/**
* Starts the stream.
* This method can only be called after {@link Stream#configure()}.
*/
public void start() throws IllegalStateException, IOException;
/**
* Stops the stream.
*/
public void stop();
/**
* Sets the Time To Live of packets sent over the network.
* @param ttl The time to live
* @throws IOException
*/
public void setTimeToLive(int ttl) throws IOException;
/**
* Sets the destination ip address of the stream.
* @param dest The destination address of the stream
*/
public void setDestinationAddress(InetAddress dest);
/**
* Sets the destination ports of the stream.
* If an odd number is supplied for the destination port, the next
* lower even number will be used for RTP and the odd number itself for RTCP.
* If an even number is supplied, it will be used for RTP and the next odd
* number will be used for RTCP.
* @param dport The destination port
*/
public void setDestinationPorts(int dport);
/**
* Sets the destination ports of the stream.
* @param rtpPort Destination port that will be used for RTP
* @param rtcpPort Destination port that will be used for RTCP
*/
public void setDestinationPorts(int rtpPort, int rtcpPort);
/**
* If TCP is used as the transport protocol for the RTP session,
* the output stream to which RTP packets will be written
* must be specified with this method.
*/
public void setOutputStream(OutputStream stream, byte channelIdentifier);
/**
* Returns a pair of source ports, the first one is the
* one used for RTP and the second one is used for RTCP.
**/
public int[] getLocalPorts();
/**
* Returns a pair of destination ports, the first one is the
* one used for RTP and the second one is used for RTCP.
**/
public int[] getDestinationPorts();
/**
* Returns the SSRC of the underlying {@link net.majorkernelpanic.streaming.rtp.RtpSocket}.
* @return the SSRC of the stream.
*/
public int getSSRC();
/**
* Returns an approximation of the bit rate consumed by the stream in bits per second.
*/
public long getBitrate();
/**
* Returns a description of the stream using SDP.
* This method can only be called after {@link Stream#configure()}.
* @throws IllegalStateException Thrown when {@link Stream#configure()} was not called.
*/
public String getSessionDescription() throws IllegalStateException;
public boolean isStreaming();
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.audio;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.reflect.Field;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import net.majorkernelpanic.streaming.SessionBuilder;
import net.majorkernelpanic.streaming.rtp.AACADTSPacketizer;
import net.majorkernelpanic.streaming.rtp.AACLATMPacketizer;
import net.majorkernelpanic.streaming.rtp.MediaCodecInputStream;
import android.annotation.SuppressLint;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Environment;
import net.majorkernelpanic.streaming.Session;
import android.util.Log;
/**
* A class for streaming AAC from the microphone of an Android device using RTP.
* You should use a {@link Session} instantiated with {@link SessionBuilder} instead of using this class directly.
* Call {@link #setDestinationAddress(InetAddress)}, {@link #setDestinationPorts(int)} and {@link #setAudioQuality(AudioQuality)}
* to configure the stream. You can then call {@link #start()} to start the RTP stream.
* Call {@link #stop()} to stop the stream.
*/
public class AACStream extends AudioStream {
public final static String TAG = "AACStream";
/** MPEG-4 Audio Object Types supported by ADTS. **/
private static final String[] AUDIO_OBJECT_TYPES = {
"NULL", // 0
"AAC Main", // 1
"AAC LC (Low Complexity)", // 2
"AAC SSR (Scalable Sample Rate)", // 3
"AAC LTP (Long Term Prediction)" // 4
};
/** The 13 sampling frequencies supported by ADTS. **/
public static final int[] AUDIO_SAMPLING_RATES = {
96000, // 0
88200, // 1
64000, // 2
48000, // 3
44100, // 4
32000, // 5
24000, // 6
22050, // 7
16000, // 8
12000, // 9
11025, // 10
8000, // 11
7350, // 12
-1, // 13
-1, // 14
-1, // 15
};
private String mSessionDescription = null;
private int mProfile, mSamplingRateIndex, mChannel, mConfig;
private SharedPreferences mSettings = null;
private AudioRecord mAudioRecord = null;
private Thread mThread = null;
public AACStream() {
super();
if (!AACStreamingSupported()) {
Log.e(TAG,"AAC not supported on this phone");
throw new RuntimeException("AAC not supported by this phone !");
} else {
Log.d(TAG,"AAC supported on this phone");
}
}
private static boolean AACStreamingSupported() {
if (Build.VERSION.SDK_INT<14) return false;
try {
MediaRecorder.OutputFormat.class.getField("AAC_ADTS");
return true;
} catch (Exception e) {
return false;
}
}
/**
* Some data (the actual sampling rate used by the phone and the AAC profile) needs to be stored once {@link #configure()} is called.
* @param prefs The SharedPreferences that will be used to store the sampling rate
*/
public void setPreferences(SharedPreferences prefs) {
mSettings = prefs;
}
@Override
public synchronized void start() throws IllegalStateException, IOException {
if (!mStreaming) {
configure();
super.start();
}
}
public synchronized void configure() throws IllegalStateException, IOException {
super.configure();
mQuality = mRequestedQuality.clone();
// Checks if the user has supplied an exotic sampling rate
int i=0;
for (;i<AUDIO_SAMPLING_RATES.length;i++) {
if (AUDIO_SAMPLING_RATES[i] == mQuality.samplingRate) {
mSamplingRateIndex = i;
break;
}
}
// If so, we fall back to a reasonable one: 16 kHz
if (i>12) mQuality.samplingRate = 16000;
if (mMode != mRequestedMode || mPacketizer==null) {
mMode = mRequestedMode;
if (mMode == MODE_MEDIARECORDER_API) {
mPacketizer = new AACADTSPacketizer();
} else {
mPacketizer = new AACLATMPacketizer();
}
mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
mPacketizer.getRtpSocket().setOutputStream(mOutputStream, mChannelIdentifier);
}
if (mMode == MODE_MEDIARECORDER_API) {
testADTS();
// All the MIME types parameters used here are described in RFC 3640
// SizeLength: 13 bits will be enough because ADTS uses 13 bits for frame length
// config: contains the object type + the sampling rate + the channel number
// TODO: streamType always 5 ? profile-level-id always 15 ?
mSessionDescription = "m=audio "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
"a=rtpmap:96 mpeg4-generic/"+mQuality.samplingRate+"\r\n"+
"a=fmtp:96 streamtype=5; profile-level-id=15; mode=AAC-hbr; config="+Integer.toHexString(mConfig)+"; SizeLength=13; IndexLength=3; IndexDeltaLength=3;\r\n";
} else {
mProfile = 2; // AAC LC
mChannel = 1;
mConfig = (mProfile & 0x1F) << 11 | (mSamplingRateIndex & 0x0F) << 7 | (mChannel & 0x0F) << 3;
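// Worked example (added for clarity): AAC-LC (profile 2) at 16 kHz
// (sampling rate index 8), mono (channel 1) gives
// (2 & 0x1F) << 11 | (8 & 0x0F) << 7 | (1 & 0x0F) << 3 = 0x1000 | 0x400 | 0x8 = 0x1408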
mSessionDescription = "m=audio "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
"a=rtpmap:96 mpeg4-generic/"+mQuality.samplingRate+"\r\n"+
"a=fmtp:96 streamtype=5; profile-level-id=15; mode=AAC-hbr; config="+Integer.toHexString(mConfig)+"; SizeLength=13; IndexLength=3; IndexDeltaLength=3;\r\n";
}
}
@Override
protected void encodeWithMediaRecorder() throws IOException {
testADTS();
((AACADTSPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);
super.encodeWithMediaRecorder();
}
@Override
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodec() throws IOException {
final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)*2;
((AACLATMPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);
mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mAudioRecord.startRecording();
mMediaCodec.start();
final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
mThread = new Thread(new Runnable() {
@Override
public void run() {
int len = 0, bufferIndex = 0;
try {
while (!Thread.interrupted()) {
bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
if (bufferIndex>=0) {
inputBuffers[bufferIndex].clear();
len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
Log.e(TAG,"An error occured with the AudioRecord API !");
} else {
//Log.v(TAG,"Pushing raw audio to the encoder: len="+len+" bs: "+inputBuffers[bufferIndex].capacity());
mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime()/1000, 0);
}
}
}
} catch (RuntimeException e) {
e.printStackTrace();
}
}
});
mThread.start();
// The packetizer encapsulates this stream in an RTP stream and sends it over the network
mPacketizer.setInputStream(inputStream);
mPacketizer.start();
mStreaming = true;
}
/** Stops the stream. */
public synchronized void stop() {
if (mStreaming) {
if (mMode==MODE_MEDIACODEC_API) {
Log.d(TAG, "Interrupting threads...");
mThread.interrupt();
mAudioRecord.stop();
mAudioRecord.release();
mAudioRecord = null;
}
super.stop();
}
}
/**
* Returns a description of the stream using SDP. It can then be included in an SDP file.
* Will fail if {@link #configure()} has not been called.
*/
public String getSessionDescription() throws IllegalStateException {
if (mSessionDescription == null) throw new IllegalStateException("You need to call configure() first !");
return mSessionDescription;
}
/**
* Records a short sample of AAC in ADTS from the microphone to find out what the sampling rate really is.
* On some phones, no error is reported when the sampling rate actually used differs from
* the one requested with setAudioSamplingRate.
* @throws IOException
* @throws IllegalStateException
*/
@SuppressLint("InlinedApi")
private void testADTS() throws IllegalStateException, IOException {
setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
try {
Field name = MediaRecorder.OutputFormat.class.getField("AAC_ADTS");
setOutputFormat(name.getInt(null));
}
catch (Exception ignore) {
setOutputFormat(6);
}
String key = PREF_PREFIX+"aac-"+mQuality.samplingRate;
if (mSettings!=null && mSettings.contains(key)) {
String[] s = mSettings.getString(key, "").split(",");
mQuality.samplingRate = Integer.valueOf(s[0]);
mConfig = Integer.valueOf(s[1]);
mChannel = Integer.valueOf(s[2]);
return;
}
final String TESTFILE = Environment.getExternalStorageDirectory().getPath()+"/spydroid-test.adts";
if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
throw new IllegalStateException("No external storage or external storage not ready !");
}
// The structure of an ADTS packet is described here: http://wiki.multimedia.cx/index.php?title=ADTS
// ADTS header is 7 or 9 bytes long
byte[] buffer = new byte[9];
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setAudioSource(mAudioSource);
mMediaRecorder.setOutputFormat(mOutputFormat);
mMediaRecorder.setAudioEncoder(mAudioEncoder);
mMediaRecorder.setAudioChannels(1);
mMediaRecorder.setAudioSamplingRate(mQuality.samplingRate);
mMediaRecorder.setAudioEncodingBitRate(mQuality.bitRate);
mMediaRecorder.setOutputFile(TESTFILE);
mMediaRecorder.setMaxDuration(1000);
mMediaRecorder.prepare();
mMediaRecorder.start();
// We record for 1 sec (see setMaxDuration above); sleeping 2 sec leaves some margin
// TODO: use the MediaRecorder.OnInfoListener
try {
Thread.sleep(2000);
} catch (InterruptedException e) {}
mMediaRecorder.stop();
mMediaRecorder.release();
mMediaRecorder = null;
File file = new File(TESTFILE);
RandomAccessFile raf = new RandomAccessFile(file, "r");
// ADTS packets start with a sync word: 12 bits set to 1
while (true) {
if ( (raf.readByte()&0xFF) == 0xFF ) {
buffer[0] = raf.readByte();
if ( (buffer[0]&0xF0) == 0xF0) break;
}
}
raf.read(buffer,1,5);
mSamplingRateIndex = (buffer[1]&0x3C)>>2 ;
mProfile = ( (buffer[1]&0xC0) >> 6 ) + 1 ;
mChannel = (buffer[1]&0x01) << 2 | (buffer[2]&0xC0) >> 6 ;
mQuality.samplingRate = AUDIO_SAMPLING_RATES[mSamplingRateIndex];
// 5 bits for the object type / 4 bits for the sampling rate / 4 bits for the channel / padding
mConfig = (mProfile & 0x1F) << 11 | (mSamplingRateIndex & 0x0F) << 7 | (mChannel & 0x0F) << 3;
Log.i(TAG,"MPEG VERSION: " + ( (buffer[0]&0x08) >> 3 ) );
Log.i(TAG,"PROTECTION: " + (buffer[0]&0x01) );
Log.i(TAG,"PROFILE: " + AUDIO_OBJECT_TYPES[ mProfile ] );
Log.i(TAG,"SAMPLING FREQUENCY: " + mQuality.samplingRate );
Log.i(TAG,"CHANNEL: " + mChannel );
raf.close();
if (mSettings!=null) {
Editor editor = mSettings.edit();
editor.putString(key, mQuality.samplingRate+","+mConfig+","+mChannel);
editor.commit();
}
if (!file.delete()) Log.e(TAG,"Temp file could not be erased");
}
}
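The Javadoc above describes the intended call sequence. Below is a minimal sketch of driving AACStream directly on a device; the destination address, ports and quality are placeholder values, error handling is elided, and a real app would normally go through a Session built with SessionBuilder instead.

import java.net.InetAddress;
import net.majorkernelpanic.streaming.audio.AACStream;
import net.majorkernelpanic.streaming.audio.AudioQuality;

public class AacStreamSketch {
    public void stream() throws Exception {
        AACStream stream = new AACStream(); // throws if the device cannot encode AAC
        stream.setDestinationAddress(InetAddress.getByName("192.168.0.10")); // placeholder
        stream.setDestinationPorts(5004); // RTP port; RTCP conventionally uses the next one
        stream.setAudioQuality(new AudioQuality(16000, 32000)); // 16 kHz, 32 kbps
        stream.configure(); // must be called before getSessionDescription()
        String sdp = stream.getSessionDescription(); // media description for the receiver
        stream.start(); // begins pushing RTP packets
        // ... stream for a while, then:
        stream.stop();
    }
}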
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.audio;
import java.io.IOException;
import java.lang.reflect.Field;
import net.majorkernelpanic.streaming.SessionBuilder;
import net.majorkernelpanic.streaming.rtp.AMRNBPacketizer;
import android.media.MediaRecorder;
import android.service.textservice.SpellCheckerService.Session;
/**
* A class for streaming AMR-NB from the microphone of an Android device using RTP.
* You should use a {@link Session} instantiated with {@link SessionBuilder} instead of using this class directly.
* Call {@link #setDestinationAddress(InetAddress)}, {@link #setDestinationPorts(int)} and {@link #setAudioQuality(AudioQuality)}
* to configure the stream. You can then call {@link #start()} to start the RTP stream.
* Call {@link #stop()} to stop the stream.
*/
public class AMRNBStream extends AudioStream {
public AMRNBStream() {
super();
mPacketizer = new AMRNBPacketizer();
setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
try {
// RAW_AMR was deprecated in API level 16.
Field deprecatedName = MediaRecorder.OutputFormat.class.getField("RAW_AMR");
setOutputFormat(deprecatedName.getInt(null));
} catch (Exception e) {
setOutputFormat(MediaRecorder.OutputFormat.AMR_NB);
}
setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
}
/**
* Starts the stream.
*/
public synchronized void start() throws IllegalStateException, IOException {
if (!mStreaming) {
configure();
super.start();
}
}
public synchronized void configure() throws IllegalStateException, IOException {
super.configure();
mMode = MODE_MEDIARECORDER_API;
mQuality = mRequestedQuality.clone();
}
/**
* Returns a description of the stream using SDP. It can then be included in an SDP file.
*/
public String getSessionDescription() {
return "m=audio "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
"a=rtpmap:96 AMR/8000\r\n" +
"a=fmtp:96 octet-align=1;\r\n";
}
@Override
protected void encodeWithMediaCodec() throws IOException {
// AMR-NB is not available through the MediaCodec path here, so fall back to the MediaRecorder path.
super.encodeWithMediaRecorder();
}
}
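For reference, with a destination RTP port of 5004 (a placeholder value) the getSessionDescription() method above yields the following media-level SDP:

m=audio 5004 RTP/AVP 96
a=rtpmap:96 AMR/8000
a=fmtp:96 octet-align=1;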
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.audio;
/**
* A class that represents the quality of an audio stream.
*/
public class AudioQuality {
/** Default audio stream quality. */
public final static AudioQuality DEFAULT_AUDIO_QUALITY = new AudioQuality(8000,32000);
/** Represents a quality for an audio stream. */
public AudioQuality() {}
/**
* Represents a quality for an audio stream.
* @param samplingRate The sampling rate
* @param bitRate The bitrate in bits per second
*/
public AudioQuality(int samplingRate, int bitRate) {
this.samplingRate = samplingRate;
this.bitRate = bitRate;
}
public int samplingRate = 0;
public int bitRate = 0;
public boolean equals(AudioQuality quality) {
if (quality==null) return false;
return (quality.samplingRate == this.samplingRate &&
quality.bitRate == this.bitRate);
}
public AudioQuality clone() {
return new AudioQuality(samplingRate, bitRate);
}
public static AudioQuality parseQuality(String str) {
AudioQuality quality = DEFAULT_AUDIO_QUALITY.clone();
if (str != null) {
String[] config = str.split("-");
try {
quality.bitRate = Integer.parseInt(config[0])*1000; // conversion to bit/s
quality.samplingRate = Integer.parseInt(config[1]);
}
catch (IndexOutOfBoundsException | NumberFormatException ignore) {}
}
return quality;
}
}
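A quick sketch of the string format parseQuality() expects: the bitrate in kbps, a dash, then the sampling rate in Hz. The values below are placeholders.

import net.majorkernelpanic.streaming.audio.AudioQuality;

public class AudioQualitySketch {
    public static void main(String[] args) {
        AudioQuality q = AudioQuality.parseQuality("32-16000");
        System.out.println(q.bitRate);      // 32000 (bits per second)
        System.out.println(q.samplingRate); // 16000 (Hz)
    }
}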
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.audio;
import java.io.FileDescriptor;
import java.io.IOException;
import java.io.InputStream;
import net.majorkernelpanic.streaming.MediaStream;
import android.media.MediaRecorder;
import android.os.ParcelFileDescriptor;
import android.util.Log;
/**
* Don't use this class directly.
*/
public abstract class AudioStream extends MediaStream {
protected int mAudioSource;
protected int mOutputFormat;
protected int mAudioEncoder;
protected AudioQuality mRequestedQuality = AudioQuality.DEFAULT_AUDIO_QUALITY.clone();
protected AudioQuality mQuality = mRequestedQuality.clone();
public AudioStream() {
setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
}
public void setAudioSource(int audioSource) {
mAudioSource = audioSource;
}
public void setAudioQuality(AudioQuality quality) {
mRequestedQuality = quality;
}
/**
* Returns the quality of the stream.
*/
public AudioQuality getAudioQuality() {
return mQuality;
}
protected void setAudioEncoder(int audioEncoder) {
mAudioEncoder = audioEncoder;
}
protected void setOutputFormat(int outputFormat) {
mOutputFormat = outputFormat;
}
@Override
protected void encodeWithMediaRecorder() throws IOException {
// We need a local socket to forward data output by the recorder to the packetizer
createSockets();
Log.v(TAG,"Requested audio with "+mQuality.bitRate/1000+"kbps"+" at "+mQuality.samplingRate/1000+"kHz");
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setAudioSource(mAudioSource);
mMediaRecorder.setOutputFormat(mOutputFormat);
mMediaRecorder.setAudioEncoder(mAudioEncoder);
mMediaRecorder.setAudioChannels(1);
mMediaRecorder.setAudioSamplingRate(mQuality.samplingRate);
mMediaRecorder.setAudioEncodingBitRate(mQuality.bitRate);
// We write the output of the recorder to a local socket instead of a file!
// This one little trick makes streaming feasible quite simply: data from the recorder
// can then be manipulated at the other end of the socket
FileDescriptor fd = null;
if (sPipeApi == PIPE_API_PFD) {
fd = mParcelWrite.getFileDescriptor();
} else {
fd = mSender.getFileDescriptor();
}
mMediaRecorder.setOutputFile(fd);
mMediaRecorder.prepare();
mMediaRecorder.start();
InputStream is = null;
if (sPipeApi == PIPE_API_PFD) {
is = new ParcelFileDescriptor.AutoCloseInputStream(mParcelRead);
} else {
try {
// mReceiver.getInputStream contains the data from the recorder
is = mReceiver.getInputStream();
} catch (IOException e) {
stop();
throw new IOException("Something happened with the local sockets :/ Start failed !");
}
}
// the mPacketizer encapsulates this stream in an RTP stream and sends it over the network
mPacketizer.setInputStream(is);
mPacketizer.start();
mStreaming = true;
}
}
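The local-socket/pipe trick described in the comments above can be hard to visualize in isolation. Here is a minimal, self-contained sketch of the ParcelFileDescriptor variant (the PIPE_API_PFD path); the audio settings are placeholders.

import java.io.InputStream;
import android.media.MediaRecorder;
import android.os.ParcelFileDescriptor;

public class PipeSketch {
    public InputStream recordToPipe() throws Exception {
        // One end of the pipe plays the role of the "output file" for
        // MediaRecorder; the other end is read back as a plain InputStream.
        ParcelFileDescriptor[] pipe = ParcelFileDescriptor.createPipe();
        MediaRecorder recorder = new MediaRecorder();
        recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        recorder.setOutputFormat(MediaRecorder.OutputFormat.AMR_NB);
        recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        recorder.setOutputFile(pipe[1].getFileDescriptor()); // write end
        recorder.prepare();
        recorder.start();
        return new ParcelFileDescriptor.AutoCloseInputStream(pipe[0]); // read end
    }
}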
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.exceptions;
public class CameraInUseException extends RuntimeException {
public CameraInUseException(String message) {
super(message);
}
private static final long serialVersionUID = -1866132102949435675L;
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.exceptions;
public class ConfNotSupportedException extends RuntimeException {
public ConfNotSupportedException(String message) {
super(message);
}
private static final long serialVersionUID = 5876298277802827615L;
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.exceptions;
public class InvalidSurfaceException extends RuntimeException {
private static final long serialVersionUID = -7238661340093544496L;
public InvalidSurfaceException(String message) {
super(message);
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.exceptions;
import java.io.IOException;
public class StorageUnavailableException extends IOException {
public StorageUnavailableException(String message) {
super(message);
}
private static final long serialVersionUID = -7537890350373995089L;
}
/*
* Based on the work of fadden
*
* Copyright 2012 Google Inc. All Rights Reserved.
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.gl;
import android.annotation.SuppressLint;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.view.Surface;
@SuppressLint("NewApi")
public class SurfaceManager {
public final static String TAG = "SurfaceManager";
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLContext mEGLContext = null;
private EGLContext mEGLSharedContext = null;
private EGLSurface mEGLSurface = null;
private EGLDisplay mEGLDisplay = null;
private Surface mSurface;
/**
* Creates an EGL context and an EGL surface.
*/
public SurfaceManager(Surface surface, SurfaceManager manager) {
mSurface = surface;
mEGLSharedContext = manager.mEGLContext;
eglSetup();
}
/**
* Creates an EGL context and an EGL surface.
*/
public SurfaceManager(Surface surface) {
mSurface = surface;
eglSetup();
}
public void makeCurrent() {
if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext))
throw new RuntimeException("eglMakeCurrent failed");
}
public void swapBuffer() {
EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
checkEglError("eglPresentationTimeANDROID");
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
*/
private void eglSetup() {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
throw new RuntimeException("unable to initialize EGL14");
}
// Configure EGL for recording and OpenGL ES 2.0.
int[] attribList;
if (mEGLSharedContext == null) {
attribList = new int[] {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_NONE
};
} else {
attribList = new int[] {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
};
}
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0);
checkEglError("eglCreateContext RGB888+recordable ES2");
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
if (mEGLSharedContext == null) {
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, attrib_list, 0);
} else {
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], mEGLSharedContext, attrib_list, 0);
}
checkEglError("eglCreateContext");
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glDisable(GLES20.GL_CULL_FACE);
}
/**
* Discards all resources held by this class, notably the EGL context. Also releases the
* Surface that was passed to our constructor.
*/
public void release() {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
}
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
mEGLContext = EGL14.EGL_NO_CONTEXT;
mEGLSurface = EGL14.EGL_NO_SURFACE;
mSurface.release();
}
/**
* Checks for EGL errors. Throws an exception if one is found.
*/
private void checkEglError(String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
}
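A sketch of the call sequence SurfaceManager is designed for; the Surface is assumed to come from MediaCodec.createInputSurface(), and error handling is elided.

import android.view.Surface;
import net.majorkernelpanic.streaming.gl.SurfaceManager;

public class SurfaceManagerSketch {
    public void renderOneFrame(Surface codecInputSurface) {
        SurfaceManager manager = new SurfaceManager(codecInputSurface); // EGL context + window surface
        manager.makeCurrent();                          // bind the EGL context to this thread
        // ... GLES 2.0 draw calls for one frame go here ...
        manager.setPresentationTime(System.nanoTime()); // timestamp picked up by MediaCodec
        manager.swapBuffer();                           // submit the frame
        manager.release();                              // tear down EGL when done
    }
}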
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.gl;
import java.util.concurrent.Semaphore;
import net.majorkernelpanic.streaming.MediaStream;
import net.majorkernelpanic.streaming.video.VideoStream;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.os.Handler;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
/**
* An enhanced SurfaceView in which the camera preview will be rendered.
* This class was needed for two reasons. <br />
*
* First, it allows feeding MediaCodec with the camera preview
* using the surface-to-buffer method while rendering it in a surface
* visible to the user. To force the surface-to-buffer method in
* libstreaming, call {@link MediaStream#setStreamingMethod(byte)}
* with {@link MediaStream#MODE_MEDIACODEC_API_2}. <br />
*
* Second, it allows forcing the aspect ratio of the SurfaceView
* to match the aspect ratio of the camera preview, so that the
* preview does not appear distorted to the user of your app. To do
* that, call {@link SurfaceView#setAspectRatioMode(int)} with
* {@link SurfaceView#ASPECT_RATIO_PREVIEW} after creating your
* {@link SurfaceView}. <br />
*
*/
public class SurfaceView extends android.view.SurfaceView implements Runnable, OnFrameAvailableListener, SurfaceHolder.Callback {
public final static String TAG = "SurfaceView";
/**
* The aspect ratio of the surface view will be equal
* to the aspect ratio of the camera preview.
**/
public static final int ASPECT_RATIO_PREVIEW = 0x01;
/** The surface view will completely fill its parent. */
public static final int ASPECT_RATIO_STRETCH = 0x00;
private Thread mThread = null;
private Handler mHandler = null;
private boolean mFrameAvailable = false;
private boolean mRunning = true;
private int mAspectRatioMode = ASPECT_RATIO_STRETCH;
// The surface in which the preview is rendered
private SurfaceManager mViewSurfaceManager = null;
// The input surface of the MediaCodec
private SurfaceManager mCodecSurfaceManager = null;
// Handles the rendering of the SurfaceTexture we got
// from the camera, onto a Surface
private TextureManager mTextureManager = null;
private final Semaphore mLock = new Semaphore(0);
private final Object mSyncObject = new Object();
// Allows forcing the aspect ratio of the preview
private ViewAspectRatioMeasurer mVARM = new ViewAspectRatioMeasurer();
public SurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
mHandler = new Handler();
getHolder().addCallback(this);
}
public void setAspectRatioMode(int mode) {
mAspectRatioMode = mode;
}
public SurfaceTexture getSurfaceTexture() {
return mTextureManager.getSurfaceTexture();
}
public void addMediaCodecSurface(Surface surface) {
synchronized (mSyncObject) {
mCodecSurfaceManager = new SurfaceManager(surface,mViewSurfaceManager);
}
}
public void removeMediaCodecSurface() {
synchronized (mSyncObject) {
if (mCodecSurfaceManager != null) {
mCodecSurfaceManager.release();
mCodecSurfaceManager = null;
}
}
}
public void startGLThread() {
Log.d(TAG,"Thread started.");
if (mTextureManager == null) {
mTextureManager = new TextureManager();
}
if (mTextureManager.getSurfaceTexture() == null) {
mThread = new Thread(SurfaceView.this);
mRunning = true;
mThread.start();
mLock.acquireUninterruptibly();
}
}
@Override
public void run() {
mViewSurfaceManager = new SurfaceManager(getHolder().getSurface());
mViewSurfaceManager.makeCurrent();
mTextureManager.createTexture().setOnFrameAvailableListener(this);
mLock.release();
try {
long ts = 0, oldts = 0;
while (mRunning) {
synchronized (mSyncObject) {
mSyncObject.wait(2500);
if (mFrameAvailable) {
mFrameAvailable = false;
mViewSurfaceManager.makeCurrent();
mTextureManager.updateFrame();
mTextureManager.drawFrame();
mViewSurfaceManager.swapBuffer();
if (mCodecSurfaceManager != null) {
mCodecSurfaceManager.makeCurrent();
mTextureManager.drawFrame();
oldts = ts;
ts = mTextureManager.getSurfaceTexture().getTimestamp();
//Log.d(TAG,"FPS: "+(1000000000/(ts-oldts)));
mCodecSurfaceManager.setPresentationTime(ts);
mCodecSurfaceManager.swapBuffer();
}
} else {
Log.e(TAG,"No frame received !");
}
}
}
} catch (InterruptedException ignore) {
} finally {
mViewSurfaceManager.release();
mTextureManager.release();
}
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (mSyncObject) {
mFrameAvailable = true;
mSyncObject.notifyAll();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (mThread != null) {
mThread.interrupt();
}
mRunning = false;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (mVARM.getAspectRatio() > 0 && mAspectRatioMode == ASPECT_RATIO_PREVIEW) {
mVARM.measure(widthMeasureSpec, heightMeasureSpec);
setMeasuredDimension(mVARM.getMeasuredWidth(), mVARM.getMeasuredHeight());
} else {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
}
/**
* Requests a certain aspect ratio for the preview. You don't have to call this yourself;
* the {@link VideoStream} will do it when it's needed.
*/
public void requestAspectRatio(double aspectRatio) {
if (mVARM.getAspectRatio() != aspectRatio) {
mVARM.setAspectRatio(aspectRatio);
mHandler.post(new Runnable() {
@Override
public void run() {
if (mAspectRatioMode == ASPECT_RATIO_PREVIEW) {
requestLayout();
}
}
});
}
}
/**
* This class is a helper to measure views that require a specific aspect ratio.
* @author Jesper Borgstrup
*/
public class ViewAspectRatioMeasurer {
private double aspectRatio;
public void setAspectRatio(double aspectRatio) {
this.aspectRatio = aspectRatio;
}
public double getAspectRatio() {
return this.aspectRatio;
}
/**
* Measure with the aspect ratio previously set with {@link #setAspectRatio(double)}.<br />
* <br />
* After measuring, get the width and height with the {@link #getMeasuredWidth()}
* and {@link #getMeasuredHeight()} methods, respectively.
* @param widthMeasureSpec The width <tt>MeasureSpec</tt> passed in your <tt>View.onMeasure()</tt> method
* @param heightMeasureSpec The height <tt>MeasureSpec</tt> passed in your <tt>View.onMeasure()</tt> method
*/
public void measure(int widthMeasureSpec, int heightMeasureSpec) {
measure(widthMeasureSpec, heightMeasureSpec, this.aspectRatio);
}
/**
* Measure with a specific aspect ratio<br />
* <br />
* After measuring, get the width and height with the {@link #getMeasuredWidth()}
* and {@link #getMeasuredHeight()} methods, respectively.
* @param widthMeasureSpec The width <tt>MeasureSpec</tt> passed in your <tt>View.onMeasure()</tt> method
* @param heightMeasureSpec The height <tt>MeasureSpec</tt> passed in your <tt>View.onMeasure()</tt> method
* @param aspectRatio The aspect ratio to calculate measurements in respect to
*/
public void measure(int widthMeasureSpec, int heightMeasureSpec, double aspectRatio) {
int widthMode = MeasureSpec.getMode( widthMeasureSpec );
int widthSize = widthMode == MeasureSpec.UNSPECIFIED ? Integer.MAX_VALUE : MeasureSpec.getSize( widthMeasureSpec );
int heightMode = MeasureSpec.getMode( heightMeasureSpec );
int heightSize = heightMode == MeasureSpec.UNSPECIFIED ? Integer.MAX_VALUE : MeasureSpec.getSize( heightMeasureSpec );
if ( heightMode == MeasureSpec.EXACTLY && widthMode == MeasureSpec.EXACTLY ) {
/*
* Possibility 1: Both width and height fixed
*/
measuredWidth = widthSize;
measuredHeight = heightSize;
} else if ( heightMode == MeasureSpec.EXACTLY ) {
/*
* Possibility 2: Width dynamic, height fixed
*/
measuredWidth = (int) Math.min( widthSize, heightSize * aspectRatio );
measuredHeight = (int) (measuredWidth / aspectRatio);
} else if ( widthMode == MeasureSpec.EXACTLY ) {
/*
* Possibility 3: Width fixed, height dynamic
*/
measuredHeight = (int) Math.min( heightSize, widthSize / aspectRatio );
measuredWidth = (int) (measuredHeight * aspectRatio);
} else {
/*
* Possibility 4: Both width and height dynamic
*/
if ( widthSize > heightSize * aspectRatio ) {
measuredHeight = heightSize;
measuredWidth = (int)( measuredHeight * aspectRatio );
} else {
measuredWidth = widthSize;
measuredHeight = (int) (measuredWidth / aspectRatio);
}
}
}
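// Worked example (added for clarity): aspectRatio = 2.0 with the width fixed
// at 1080 px (EXACTLY) and the height unconstrained falls into possibility 3:
// measuredHeight = min(heightSize, 1080 / 2.0) = 540, measuredWidth = 540 * 2.0 = 1080.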
private Integer measuredWidth = null;
/**
* Get the width measured in the latest call to <tt>measure()</tt>.
*/
public int getMeasuredWidth() {
if ( measuredWidth == null ) {
throw new IllegalStateException( "You need to run measure() before trying to get measured dimensions" );
}
return measuredWidth;
}
private Integer measuredHeight = null;
/**
* Get the height measured in the latest call to <tt>measure()</tt>.
*/
public int getMeasuredHeight() {
if ( measuredHeight == null ) {
throw new IllegalStateException( "You need to run measure() before trying to get measured dimensions" );
}
return measuredHeight;
}
}
}
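As the Javadoc above notes, the aspect-ratio correction is opt-in. Below is a minimal sketch of enabling it from an Activity; the layout and view ids are hypothetical.

import android.app.Activity;
import android.os.Bundle;

public class PreviewActivitySketch extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_preview); // hypothetical layout
        net.majorkernelpanic.streaming.gl.SurfaceView surfaceView =
                (net.majorkernelpanic.streaming.gl.SurfaceView) findViewById(R.id.surface); // hypothetical id
        surfaceView.setAspectRatioMode(
                net.majorkernelpanic.streaming.gl.SurfaceView.ASPECT_RATIO_PREVIEW);
    }
}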
/*
* Based on the work of fadden
*
* Copyright 2012 Google Inc. All Rights Reserved.
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.gl;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import android.annotation.SuppressLint;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
/**
* Code for rendering a texture onto a surface using OpenGL ES 2.0.
*/
@SuppressLint("InlinedApi")
public class TextureManager {
public final static String TAG = "TextureManager";
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private static final String VERTEX_SHADER =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID = -12345;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private SurfaceTexture mSurfaceTexture;
public TextureManager() {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public int getTextureId() {
return mTextureID;
}
public SurfaceTexture getSurfaceTexture() {
return mSurfaceTexture;
}
public void updateFrame() {
mSurfaceTexture.updateTexImage();
}
public void drawFrame() {
checkGlError("onDrawFrame start");
mSurfaceTexture.getTransformMatrix(mSTMatrix);
//GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
//GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();
}
/**
* Initializes GL state. Call this after the EGL surface has been created and made current.
*/
public SurfaceTexture createTexture() {
mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameter");
mSurfaceTexture = new SurfaceTexture(mTextureID);
return mSurfaceTexture;
}
public void release() {
mSurfaceTexture = null;
}
/**
* Replaces the fragment shader. Pass in null to reset to default.
*/
public void changeFragmentShader(String fragmentShader) {
if (fragmentShader == null) {
fragmentShader = FRAGMENT_SHADER;
}
GLES20.glDeleteProgram(mProgram);
mProgram = createProgram(VERTEX_SHADER, fragmentShader);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
checkGlError("glCreateProgram");
if (program == 0) {
Log.e(TAG, "Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
public void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
}
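TextureManager's methods are meant to be called in a fixed order on a thread that owns a current EGL context (the SurfaceView.run() loop above is the in-library example). A condensed sketch, with the frame-producer wiring left as an assumption:

import android.graphics.SurfaceTexture;
import net.majorkernelpanic.streaming.gl.TextureManager;

public class TextureManagerSketch {
    private final TextureManager tm = new TextureManager();

    public SurfaceTexture setUp() {
        // Requires a current EGL context on the calling thread.
        return tm.createTexture(); // hand this SurfaceTexture to the frame producer
    }

    public void onFrameAvailable() {
        tm.updateFrame(); // latch the newest image into the OES texture
        tm.drawFrame();   // draw it onto the current EGL surface
    }
}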
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.hw;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import android.annotation.SuppressLint;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.util.Log;
@SuppressLint("InlinedApi")
public class CodecManager {
public final static String TAG = "CodecManager";
public static final int[] SUPPORTED_COLOR_FORMATS = {
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar
};
private static Codec[] sEncoders = null;
private static Codec[] sDecoders = null;
static class Codec {
public Codec(String name, Integer[] formats) {
this.name = name;
this.formats = formats;
}
public String name;
public Integer[] formats;
}
/**
* Lists all encoders that claim to support a color format that we know how to use.
* @return A list of those encoders
*/
@SuppressLint("NewApi")
public synchronized static Codec[] findEncodersForMimeType(String mimeType) {
if (sEncoders != null) return sEncoders;
ArrayList<Codec> encoders = new ArrayList<>();
// We loop through the encoders; apparently this can take up to a second (tested on a GS3)
for(int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--){
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
if (!codecInfo.isEncoder()) continue;
String[] types = codecInfo.getSupportedTypes();
for (int i = 0; i < types.length; i++) {
if (types[i].equalsIgnoreCase(mimeType)) {
try {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
Set<Integer> formats = new HashSet<>();
// And through the color formats supported
for (int k = 0; k < capabilities.colorFormats.length; k++) {
int format = capabilities.colorFormats[k];
for (int l=0;l<SUPPORTED_COLOR_FORMATS.length;l++) {
if (format == SUPPORTED_COLOR_FORMATS[l]) {
formats.add(format);
}
}
}
Codec codec = new Codec(codecInfo.getName(), (Integer[]) formats.toArray(new Integer[formats.size()]));
encoders.add(codec);
} catch (Exception e) {
Log.wtf(TAG,e);
}
}
}
}
sEncoders = (Codec[]) encoders.toArray(new Codec[encoders.size()]);
return sEncoders;
}
/**
* Lists all decoders that claim to support a color format that we know how to use.
* @return A list of those decoders
*/
@SuppressLint("NewApi")
public synchronized static Codec[] findDecodersForMimeType(String mimeType) {
if (sDecoders != null) return sDecoders;
ArrayList<Codec> decoders = new ArrayList<>();
// We loop through the decoders; apparently this can take up to a second (tested on a GS3)
for(int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--){
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
if (codecInfo.isEncoder()) continue;
String[] types = codecInfo.getSupportedTypes();
for (int i = 0; i < types.length; i++) {
if (types[i].equalsIgnoreCase(mimeType)) {
try {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
Set<Integer> formats = new HashSet<>();
// And through the color formats supported
for (int k = 0; k < capabilities.colorFormats.length; k++) {
int format = capabilities.colorFormats[k];
for (int l=0;l<SUPPORTED_COLOR_FORMATS.length;l++) {
if (format == SUPPORTED_COLOR_FORMATS[l]) {
formats.add(format);
}
}
}
Codec codec = new Codec(codecInfo.getName(), (Integer[]) formats.toArray(new Integer[formats.size()]));
decoders.add(codec);
} catch (Exception e) {
Log.wtf(TAG,e);
}
}
}
}
sDecoders = (Codec[]) decoders.toArray(new Codec[decoders.size()]);
// We will use the decoder from Google first; it seems to work properly on many phones
for (int i=0;i<sDecoders.length;i++) {
if (sDecoders[i].name.equalsIgnoreCase("omx.google.h264.decoder")) {
Codec codec = sDecoders[0];
sDecoders[0] = sDecoders[i];
sDecoders[i] = codec;
}
}
return sDecoders;
}
}
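A sketch of querying the helper above; since CodecManager.Codec is package-private, this would have to live in the same package.

package net.majorkernelpanic.streaming.hw;

import android.util.Log;

public class CodecListSketch {
    public static void logAvcEncoders() {
        CodecManager.Codec[] encoders = CodecManager.findEncodersForMimeType("video/avc");
        for (CodecManager.Codec c : encoders) {
            StringBuilder formats = new StringBuilder();
            for (Integer f : c.formats) formats.append(" 0x").append(Integer.toHexString(f));
            Log.d("CodecListSketch", c.name + " supports:" + formats);
        }
    }
}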
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.hw;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.ByteBuffer;
import net.majorkernelpanic.streaming.hw.CodecManager.Codec;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.preference.PreferenceManager;
import android.util.Base64;
import android.util.Log;
/**
*
* The purpose of this class is to detect and bypass some bugs (or underspecified configurations) that
* encoders available through the MediaCodec API may have. <br />
* Feeding the encoder with a surface is not tested here.
* Some bugs you may have encountered:<br />
* <ul>
* <li>U and V panes reversed</li>
* <li>Some padding is needed after the Y pane</li>
* <li>stride!=width or slice-height!=height</li>
* </ul>
*/
@SuppressLint("NewApi")
public class EncoderDebugger {
public final static String TAG = "EncoderDebugger";
/** Prefix that will be used for all shared preferences saved by libstreaming. */
private static final String PREF_PREFIX = "libstreaming-";
/**
* If this is set to false the test will be run only once and the result
* will be saved in the shared preferences.
*/
private static final boolean DEBUG = false;
/** Set this to true to see more logs. */
private static final boolean VERBOSE = false;
/** Will be incremented every time this test is modified. */
private static final int VERSION = 3;
/** Bit rate that will be used with the encoder. */
private final static int BITRATE = 1000000;
/** Frame rate that will be used to test the encoder. */
private final static int FRAMERATE = 20;
private final static String MIME_TYPE = "video/avc";
private final static int NB_DECODED = 34;
private final static int NB_ENCODED = 50;
private int mDecoderColorFormat, mEncoderColorFormat;
private String mDecoderName, mEncoderName, mErrorLog;
private MediaCodec mEncoder, mDecoder;
private int mWidth, mHeight, mSize;
private byte[] mSPS, mPPS;
private byte[] mData, mInitialImage;
private MediaFormat mDecOutputFormat;
private NV21Convertor mNV21;
private SharedPreferences mPreferences;
private byte[][] mVideo, mDecodedVideo;
private String mB64PPS, mB64SPS;
public synchronized static void asyncDebug(final Context context, final int width, final int height) {
new Thread(new Runnable() {
@Override
public void run() {
try {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
debug(prefs, width, height);
} catch (Exception e) {}
}
}).start();
}
public synchronized static EncoderDebugger debug(Context context, int width, int height) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
return debug(prefs, width, height);
}
public synchronized static EncoderDebugger debug(SharedPreferences prefs, int width, int height) {
EncoderDebugger debugger = new EncoderDebugger(prefs, width, height);
debugger.debug();
return debugger;
}
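// Usage sketch (added for clarity): the call blocks while the test runs, or
// returns quickly from the shared-preferences cache on subsequent runs:
//   EncoderDebugger debugger = EncoderDebugger.debug(context, 640, 480);
//   String encoder = debugger.getEncoderName();
//   String sps = debugger.getB64SPS(), pps = debugger.getB64PPS();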
public String getB64PPS() {
return mB64PPS;
}
public String getB64SPS() {
return mB64SPS;
}
public String getEncoderName() {
return mEncoderName;
}
public int getEncoderColorFormat() {
return mEncoderColorFormat;
}
/** This {@link NV21Convertor} will do the necessary work to properly feed the encoder. */
public NV21Convertor getNV21Convertor() {
return mNV21;
}
/** A log of all the errors that occurred during the test. */
public String getErrorLog() {
return mErrorLog;
}
private EncoderDebugger(SharedPreferences prefs, int width, int height) {
mPreferences = prefs;
mWidth = width;
mHeight = height;
mSize = width*height;
reset();
}
private void reset() {
mNV21 = new NV21Convertor();
mVideo = new byte[NB_ENCODED][];
mDecodedVideo = new byte[NB_DECODED][];
mErrorLog = "";
mPPS = null;
mSPS = null;
}
private void debug() {
// If testing the phone again is not needed,
// we just restore the result from the shared preferences
if (!checkTestNeeded()) {
String resolution = mWidth+"x"+mHeight+"-";
boolean success = mPreferences.getBoolean(PREF_PREFIX+resolution+"success",false);
if (!success) {
throw new RuntimeException("Phone not supported with this resolution ("+mWidth+"x"+mHeight+")");
}
mNV21.setSize(mWidth, mHeight);
mNV21.setSliceHeigth(mPreferences.getInt(PREF_PREFIX+resolution+"sliceHeight", 0));
mNV21.setStride(mPreferences.getInt(PREF_PREFIX+resolution+"stride", 0));
mNV21.setYPadding(mPreferences.getInt(PREF_PREFIX+resolution+"padding", 0));
mNV21.setPlanar(mPreferences.getBoolean(PREF_PREFIX+resolution+"planar", false));
mNV21.setColorPanesReversed(mPreferences.getBoolean(PREF_PREFIX+resolution+"reversed", false));
mEncoderName = mPreferences.getString(PREF_PREFIX+resolution+"encoderName", "");
mEncoderColorFormat = mPreferences.getInt(PREF_PREFIX+resolution+"colorFormat", 0);
mB64PPS = mPreferences.getString(PREF_PREFIX+resolution+"pps", "");
mB64SPS = mPreferences.getString(PREF_PREFIX+resolution+"sps", "");
return;
}
if (VERBOSE) Log.d(TAG, ">>>> Testing the phone for resolution "+mWidth+"x"+mHeight);
// Builds a list of available encoders and decoders we may be able to use
// because they support some nice color formats
Codec[] encoders = CodecManager.findEncodersForMimeType(MIME_TYPE);
Codec[] decoders = CodecManager.findDecodersForMimeType(MIME_TYPE);
int count = 0, n = 1;
for (int i=0;i<encoders.length;i++) {
count += encoders[i].formats.length;
}
// Tries available encoders
for (int i=0;i<encoders.length;i++) {
for (int j=0;j<encoders[i].formats.length;j++) {
reset();
mEncoderName = encoders[i].name;
mEncoderColorFormat = encoders[i].formats[j];
if (VERBOSE) Log.v(TAG, ">> Test "+(n++)+"/"+count+": "+mEncoderName+" with color format "+mEncoderColorFormat+" at "+mWidth+"x"+mHeight);
// Converts from NV21 to YUV420 with the specified parameters
mNV21.setSize(mWidth, mHeight);
mNV21.setSliceHeigth(mHeight);
mNV21.setStride(mWidth);
mNV21.setYPadding(0);
mNV21.setEncoderColorFormat(mEncoderColorFormat);
// /!\ NV21Convertor can directly modify the input
createTestImage();
mData = mNV21.convert(mInitialImage);
try {
// Starts the encoder
configureEncoder();
searchSPSandPPS();
if (VERBOSE) Log.v(TAG, "SPS and PPS in b64: SPS="+mB64SPS+", PPS="+mB64PPS);
// Feeds the encoder with an image repeatedly to produce some NAL units
encode();
// We now try to decode the NALs with decoders available on the phone
boolean decoded = false;
for (int k=0;k<decoders.length && !decoded;k++) {
for (int l=0;l<decoders[k].formats.length && !decoded;l++) {
mDecoderName = decoders[k].name;
mDecoderColorFormat = decoders[k].formats[l];
try {
configureDecoder();
} catch (Exception e) {
if (VERBOSE) Log.d(TAG, mDecoderName+" can't be used with "+mDecoderColorFormat+" at "+mWidth+"x"+mHeight);
releaseDecoder();
break;
}
try {
decode(true);
if (VERBOSE) Log.d(TAG, mDecoderName+" successfully decoded the NALs (color format "+mDecoderColorFormat+")");
decoded = true;
} catch (Exception e) {
if (VERBOSE) Log.e(TAG, mDecoderName+" failed to decode the NALs");
e.printStackTrace();
} finally {
releaseDecoder();
}
}
}
if (!decoded) throw new RuntimeException("Failed to decode NALs from the encoder.");
// Compares the image before and after
if (!compareLumaPanes()) {
// TODO: try again with a different stride
// TODO: try again with the "stride" param
throw new RuntimeException("It is likely that stride!=width");
}
int padding;
if ((padding = checkPaddingNeeded())>0) {
if (padding<4096) {
if (VERBOSE) Log.d(TAG, "Some padding is needed: "+padding);
mNV21.setYPadding(padding);
createTestImage();
mData = mNV21.convert(mInitialImage);
encodeDecode();
} else {
// TODO: try again with a different sliceHeight
// TODO: try again with the "slice-height" param
throw new RuntimeException("It is likely that sliceHeight!=height");
}
}
createTestImage();
if (!compareChromaPanes(false)) {
if (compareChromaPanes(true)) {
mNV21.setColorPanesReversed(true);
if (VERBOSE) Log.d(TAG, "U and V pane are reversed");
} else {
throw new RuntimeException("Incorrect U or V pane...");
}
}
saveTestResult(true);
Log.v(TAG, "The encoder "+mEncoderName+" is usable with resolution "+mWidth+"x"+mHeight);
return;
} catch (Exception e) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw);
String stack = sw.toString();
String str = "Encoder "+mEncoderName+" cannot be used with color format "+mEncoderColorFormat;
if (VERBOSE) Log.e(TAG, str, e);
mErrorLog += str + "\n" + stack;
e.printStackTrace();
} finally {
releaseEncoder();
}
}
}
saveTestResult(false);
Log.e(TAG,"No usable encoder were found on the phone for resolution "+mWidth+"x"+mHeight);
throw new RuntimeException("No usable encoder were found on the phone for resolution "+mWidth+"x"+mHeight);
}
private boolean checkTestNeeded() {
String resolution = mWidth+"x"+mHeight+"-";
// Forces the test
if (DEBUG || mPreferences==null) return true;
// If the SDK on the phone has changed, or if the version of this test
// has changed, it has to be run again
if (mPreferences.contains(PREF_PREFIX+resolution+"lastSdk")) {
int lastSdk = mPreferences.getInt(PREF_PREFIX+resolution+"lastSdk", 0);
int lastVersion = mPreferences.getInt(PREF_PREFIX+resolution+"lastVersion", 0);
if (Build.VERSION.SDK_INT>lastSdk || VERSION>lastVersion) {
return true;
}
} else {
return true;
}
return false;
}
/**
* Saves the result of the test in the shared preferences,
* we will run it again only if the SDK has changed on the phone,
* or if this test has been modified.
*/
private void saveTestResult(boolean success) {
String resolution = mWidth+"x"+mHeight+"-";
Editor editor = mPreferences.edit();
editor.putBoolean(PREF_PREFIX+resolution+"success", success);
if (success) {
editor.putInt(PREF_PREFIX+resolution+"lastSdk", Build.VERSION.SDK_INT);
editor.putInt(PREF_PREFIX+resolution+"lastVersion", VERSION);
editor.putInt(PREF_PREFIX+resolution+"sliceHeight", mNV21.getSliceHeigth());
editor.putInt(PREF_PREFIX+resolution+"stride", mNV21.getStride());
editor.putInt(PREF_PREFIX+resolution+"padding", mNV21.getYPadding());
editor.putBoolean(PREF_PREFIX+resolution+"planar", mNV21.getPlanar());
editor.putBoolean(PREF_PREFIX+resolution+"reversed", mNV21.getUVPanesReversed());
editor.putString(PREF_PREFIX+resolution+"encoderName", mEncoderName);
editor.putInt(PREF_PREFIX+resolution+"colorFormat", mEncoderColorFormat);
editor.putString(PREF_PREFIX+resolution+"encoderName", mEncoderName);
editor.putString(PREF_PREFIX+resolution+"pps", mB64PPS);
editor.putString(PREF_PREFIX+resolution+"sps", mB64SPS);
}
editor.commit();
}
/**
* Creates the test image that will be used to feed the encoder.
*/
private void createTestImage() {
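// The buffer holds one NV21 frame: mSize luma bytes followed by mSize/2 interleaved chroma bytes.
// The ramps below give pixels deterministic, position-dependent values, so stride, padding and
// swapped-pane errors show up when the planes are compared after an encode/decode round trip.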
mInitialImage = new byte[3*mSize/2];
for (int i=0;i<mSize;i++) {
mInitialImage[i] = (byte) (40+i%199);
}
for (int i=mSize;i<3*mSize/2;i+=2) {
mInitialImage[i] = (byte) (40+i%200);
mInitialImage[i+1] = (byte) (40+(i+99)%200);
}
}
/**
* Compares the Y pane of the initial image, and the Y pane
* after having encoded & decoded the image.
*/
private boolean compareLumaPanes() {
int d, e, f = 0;
for (int j=0;j<NB_DECODED;j++) {
for (int i=0;i<mSize;i+=10) {
d = (mInitialImage[i]&0xFF) - (mDecodedVideo[j][i]&0xFF);
e = (mInitialImage[i+1]&0xFF) - (mDecodedVideo[j][i+1]&0xFF);
d = d<0 ? -d : d;
e = e<0 ? -e : e;
if (d>50 && e>50) {
mDecodedVideo[j] = null;
f++;
break;
}
}
}
return f<=NB_DECODED/2;
}
private int checkPaddingNeeded() {
int i = 0, j = 3*mSize/2-1, max = 0;
int[] r = new int[NB_DECODED];
for (int k=0;k<NB_DECODED;k++) {
if (mDecodedVideo[k] != null) {
i = 0;
while (i<j && (mDecodedVideo[k][j-i]&0xFF)<50) i+=2;
if (i>0) {
r[k] = ((i>>6)<<6);
max = r[k]>max ? r[k] : max;
if (VERBOSE) Log.e(TAG,"Padding needed: "+r[k]);
} else {
if (VERBOSE) Log.v(TAG,"No padding needed.");
}
}
}
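// Rounds the detected padding down to a multiple of 64 bytes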
return ((max>>6)<<6);
}
/**
* Compares the U or V pane of the initial image, and the U or V pane
* after having encoded & decoded the image.
*/
private boolean compareChromaPanes(boolean crossed) {
int d, f = 0;
for (int j=0;j<NB_DECODED;j++) {
if (mDecodedVideo[j] != null) {
// We compare the U and V pane before and after
if (!crossed) {
for (int i=mSize;i<3*mSize/2;i+=1) {
d = (mInitialImage[i]&0xFF) - (mDecodedVideo[j][i]&0xFF);
d = d<0 ? -d : d;
if (d>50) {
//if (VERBOSE) Log.e(TAG,"BUG "+(i-mSize)+" d "+d);
f++;
break;
}
}
// We compare the V pane before with the U pane after
} else {
for (int i=mSize;i<3*mSize/2;i+=2) {
d = (mInitialImage[i]&0xFF) - (mDecodedVideo[j][i+1]&0xFF);
d = d<0 ? -d : d;
if (d>50) {
f++;
}
}
}
}
}
return f<=NB_DECODED/2;
}
/**
* Converts the image obtained from the decoder to NV21.
*/
private void convertToNV21(int k) {
byte[] buffer = new byte[3*mSize/2];
int stride = mWidth, sliceHeight = mHeight;
int colorFormat = mDecoderColorFormat;
boolean planar = false;
if (mDecOutputFormat != null) {
MediaFormat format = mDecOutputFormat;
if (format != null) {
if (format.containsKey("slice-height")) {
sliceHeight = format.getInteger("slice-height");
if (sliceHeight<mHeight) sliceHeight = mHeight;
}
if (format.containsKey("stride")) {
stride = format.getInteger("stride");
if (stride<mWidth) stride = mWidth;
}
if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT) && format.getInteger(MediaFormat.KEY_COLOR_FORMAT)>0) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
}
}
}
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
planar = false;
break;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
planar = true;
break;
}
for (int i=0;i<mSize;i++) {
if (i%mWidth==0) i+=stride-mWidth;
buffer[i] = mDecodedVideo[k][i];
}
if (!planar) {
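// The semi-planar decoder output interleaves chroma as U,V (NV12-style) while NV21 expects V,U,
// so the two bytes of each pair are swapped while copying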
for (int i=0,j=0;j<mSize/4;i+=1,j+=1) {
if (i%mWidth/2==0) i+=(stride-mWidth)/2;
buffer[mSize+2*j+1] = mDecodedVideo[k][stride*sliceHeight+2*i];
buffer[mSize+2*j] = mDecodedVideo[k][stride*sliceHeight+2*i+1];
}
} else {
for (int i=0,j=0;j<mSize/4;i+=1,j+=1) {
if (i%mWidth/2==0) i+=(stride-mWidth)/2;
buffer[mSize+2*j+1] = mDecodedVideo[k][stride*sliceHeight+i];
buffer[mSize+2*j] = mDecodedVideo[k][stride*sliceHeight*5/4+i];
}
}
mDecodedVideo[k] = buffer;
}
/**
* Instantiates and starts the encoder.
* @throws IOException The encoder cannot be configured
*/
private void configureEncoder() throws IOException {
mEncoder = MediaCodec.createByCodecName(mEncoderName);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMERATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mEncoderColorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEncoder.start();
}
private void releaseEncoder() {
if (mEncoder != null) {
try {
mEncoder.stop();
} catch (Exception ignore) {}
try {
mEncoder.release();
} catch (Exception ignore) {}
}
}
/**
* Instantiates and starts the decoder.
* @throws IOException The decoder cannot be configured
*/
private void configureDecoder() throws IOException {
byte[] prefix = new byte[] {0x00,0x00,0x00,0x01};
ByteBuffer csd0 = ByteBuffer.allocate(4+mSPS.length+4+mPPS.length);
csd0.put(new byte[] {0x00,0x00,0x00,0x01});
csd0.put(mSPS);
csd0.put(new byte[] {0x00,0x00,0x00,0x01});
csd0.put(mPPS);
mDecoder = MediaCodec.createByCodecName(mDecoderName);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
mediaFormat.setByteBuffer("csd-0", csd0);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDecoderColorFormat);
mDecoder.configure(mediaFormat, null, null, 0);
mDecoder.start();
ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
int decInputIndex = mDecoder.dequeueInputBuffer(1000000/FRAMERATE);
if (decInputIndex>=0) {
decInputBuffers[decInputIndex].clear();
decInputBuffers[decInputIndex].put(prefix);
decInputBuffers[decInputIndex].put(mSPS);
mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
} else {
if (VERBOSE) Log.e(TAG,"No buffer available !");
}
decInputIndex = mDecoder.dequeueInputBuffer(1000000/FRAMERATE);
if (decInputIndex>=0) {
decInputBuffers[decInputIndex].clear();
decInputBuffers[decInputIndex].put(prefix);
decInputBuffers[decInputIndex].put(mPPS);
mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
} else {
if (VERBOSE) Log.e(TAG,"No buffer available !");
}
}
private void releaseDecoder() {
if (mDecoder != null) {
try {
mDecoder.stop();
} catch (Exception ignore) {}
try {
mDecoder.release();
} catch (Exception ignore) {}
}
}
/**
* Tries to obtain the SPS and the PPS for the encoder.
*/
private long searchSPSandPPS() {
ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
BufferInfo info = new BufferInfo();
byte[] csd = new byte[128];
int len = 0, p = 4, q = 4;
long elapsed = 0, now = timestamp();
while (elapsed<3000000 && (mSPS==null || mPPS==null)) {
// Some encoders won't give us the SPS and PPS unless they receive something to encode first...
int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
if (bufferIndex>=0) {
check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
inputBuffers[bufferIndex].clear();
inputBuffers[bufferIndex].put(mData, 0, mData.length);
mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
} else {
if (VERBOSE) Log.e(TAG,"No buffer available !");
}
// We are looking for the SPS and the PPS here. As always, Android is very inconsistent: I have observed that some
// encoders will give those parameters through the MediaFormat object (that is the normal behaviour),
// but some others will not. In that case we try to find a NAL unit of type 7 or 8 in the byte stream output by the encoder...
int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// The SPS and PPS should be there
MediaFormat format = mEncoder.getOutputFormat();
ByteBuffer spsb = format.getByteBuffer("csd-0");
ByteBuffer ppsb = format.getByteBuffer("csd-1");
mSPS = new byte[spsb.capacity()-4];
spsb.position(4);
spsb.get(mSPS,0,mSPS.length);
mPPS = new byte[ppsb.capacity()-4];
ppsb.position(4);
ppsb.get(mPPS,0,mPPS.length);
break;
} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mEncoder.getOutputBuffers();
} else if (index>=0) {
len = info.size;
if (len<128) {
outputBuffers[index].get(csd,0,len);
if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
// Parses the SPS and PPS; they could be in two different packets and in a different order
// depending on the phone, so we don't make any assumptions about that
while (p<len) {
while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
if (p+3>=len) p=len;
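// The low 5 bits of the first byte of a NAL unit give its type: 7 is an SPS, 8 is a PPS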
if ((csd[q]&0x1F)==7) {
mSPS = new byte[p-q];
System.arraycopy(csd, q, mSPS, 0, p-q);
} else {
mPPS = new byte[p-q];
System.arraycopy(csd, q, mPPS, 0, p-q);
}
p += 4;
q = p;
}
}
}
mEncoder.releaseOutputBuffer(index, false);
}
elapsed = timestamp() - now;
}
check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);
return elapsed;
}
private long encode() {
int n = 0;
long elapsed = 0, now = timestamp();
int encOutputIndex = 0, encInputIndex = 0;
BufferInfo info = new BufferInfo();
ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();
while (elapsed<5000000) {
// Feeds the encoder with an image
encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
if (encInputIndex>=0) {
check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
encInputBuffers[encInputIndex].clear();
encInputBuffers[encInputIndex].put(mData, 0, mData.length);
mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
} else {
if (VERBOSE) Log.d(TAG,"No buffer available !");
}
// Tries to get a NAL unit
encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
encOutputBuffers = mEncoder.getOutputBuffers();
} else if (encOutputIndex>=0) {
mVideo[n] = new byte[info.size];
encOutputBuffers[encOutputIndex].clear();
encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
mEncoder.releaseOutputBuffer(encOutputIndex, false);
if (n>=NB_ENCODED) {
flushMediaCodec(mEncoder);
return elapsed;
}
}
elapsed = timestamp() - now;
}
throw new RuntimeException("The encoder is too slow.");
}
/**
* @param withPrefix If set to true, the decoder will be fed with NALs preceded with 0x00000001.
* @return How long it took to decode all the NALs
*/
private long decode(boolean withPrefix) {
int n = 0, i = 0, j = 0;
long elapsed = 0, now = timestamp();
int decInputIndex = 0, decOutputIndex = 0;
ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
ByteBuffer[] decOutputBuffers = mDecoder.getOutputBuffers();
BufferInfo info = new BufferInfo();
while (elapsed<3000000) {
// Feeds the decoder with a NAL unit
if (i<NB_ENCODED) {
decInputIndex = mDecoder.dequeueInputBuffer(1000000/FRAMERATE);
if (decInputIndex>=0) {
int l1 = decInputBuffers[decInputIndex].capacity();
int l2 = mVideo[i].length;
decInputBuffers[decInputIndex].clear();
if ((withPrefix && hasPrefix(mVideo[i])) || (!withPrefix && !hasPrefix(mVideo[i]))) {
check(l1>=l2, "The decoder input buffer is not big enough (nal="+l2+", capacity="+l1+").");
decInputBuffers[decInputIndex].put(mVideo[i],0,mVideo[i].length);
} else if (withPrefix && !hasPrefix(mVideo[i])) {
check(l1>=l2+4, "The decoder input buffer is not big enough (nal="+(l2+4)+", capacity="+l1+").");
decInputBuffers[decInputIndex].put(new byte[] {0,0,0,1});
decInputBuffers[decInputIndex].put(mVideo[i],0,mVideo[i].length);
} else if (!withPrefix && hasPrefix(mVideo[i])) {
check(l1>=l2-4, "The decoder input buffer is not big enough (nal="+(l2-4)+", capacity="+l1+").");
decInputBuffers[decInputIndex].put(mVideo[i],4,mVideo[i].length-4);
}
mDecoder.queueInputBuffer(decInputIndex, 0, l2, timestamp(), 0);
i++;
} else {
if (VERBOSE) Log.d(TAG,"No buffer available !");
}
}
// Tries to get a decoded image
decOutputIndex = mDecoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
if (decOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
decOutputBuffers = mDecoder.getOutputBuffers();
} else if (decOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
mDecOutputFormat = mDecoder.getOutputFormat();
} else if (decOutputIndex>=0) {
if (n>2) {
// We have successfully encoded and decoded an image !
int length = info.size;
mDecodedVideo[j] = new byte[length];
decOutputBuffers[decOutputIndex].clear();
decOutputBuffers[decOutputIndex].get(mDecodedVideo[j], 0, length);
// Converts the decoded frame to NV21
convertToNV21(j);
if (j>=NB_DECODED-1) {
flushMediaCodec(mDecoder);
if (VERBOSE) Log.v(TAG, "Decoding "+n+" frames took "+elapsed/1000+" ms");
return elapsed;
}
j++;
}
mDecoder.releaseOutputBuffer(decOutputIndex, false);
n++;
}
elapsed = timestamp() - now;
}
throw new RuntimeException("The decoder did not decode anything.");
}
/**
* Checks whether the NAL unit starts with the 0x00000001 start code.
* @param nal The NAL unit to check
*/
private boolean hasPrefix(byte[] nal) {
return nal[0] == 0 && nal[1] == 0 && nal[2] == 0 && nal[3] == 0x01;
}
/**
* @throws IOException The decoder cannot be configured.
*/
private void encodeDecode() throws IOException {
encode();
try {
configureDecoder();
decode(true);
} finally {
releaseDecoder();
}
}
private void flushMediaCodec(MediaCodec mc) {
int index = 0;
BufferInfo info = new BufferInfo();
while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
if (index>=0) {
mc.releaseOutputBuffer(index, false);
}
}
}
private void check(boolean cond, String message) {
if (!cond) {
if (VERBOSE) Log.e(TAG,message);
throw new IllegalStateException(message);
}
}
private long timestamp() {
return System.nanoTime()/1000;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.hw;
import java.nio.ByteBuffer;
import android.media.MediaCodecInfo;
import android.util.Log;
/**
* Converts from NV21 to YUV420 semi planar or planar.
*/
public class NV21Convertor {
private int mSliceHeight, mHeight;
private int mStride, mWidth;
private int mSize;
private boolean mPlanar, mPanesReversed = false;
private int mYPadding;
private byte[] mBuffer;
ByteBuffer mCopy;
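/*
 * Illustrative usage sketch (not part of the original sources); the 640x480
 * frame size and the nv21Frame array are hypothetical:
 *
 *   NV21Convertor convertor = new NV21Convertor();
 *   convertor.setSize(640, 480);
 *   convertor.setEncoderColorFormat(
 *           MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
 *   byte[] nv21Frame = new byte[convertor.getBufferSize()]; // 640*480*3/2 bytes
 *   byte[] yuv = convertor.convert(nv21Frame);
 */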
public void setSize(int width, int height) {
mHeight = height;
mWidth = width;
mSliceHeight = height;
mStride = width;
mSize = mWidth*mHeight;
}
public void setStride(int width) {
mStride = width;
}
public void setSliceHeigth(int height) {
mSliceHeight = height;
}
public void setPlanar(boolean planar) {
mPlanar = planar;
}
public void setYPadding(int padding) {
mYPadding = padding;
}
public int getBufferSize() {
return 3*mSize/2;
}
public void setEncoderColorFormat(int colorFormat) {
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
setPlanar(false);
break;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
setPlanar(true);
break;
}
}
public void setColorPanesReversed(boolean b) {
mPanesReversed = b;
}
public int getStride() {
return mStride;
}
public int getSliceHeigth() {
return mSliceHeight;
}
public int getYPadding() {
return mYPadding;
}
public boolean getPlanar() {
return mPlanar;
}
public boolean getUVPanesReversed() {
return mPanesReversed;
}
public void convert(byte[] data, ByteBuffer buffer) {
byte[] result = convert(data);
int min = buffer.capacity() < data.length?buffer.capacity() : data.length;
buffer.put(result, 0, min);
}
public byte[] convert(byte[] data) {
// A buffer large enough for every case
if (mBuffer==null || mBuffer.length != 3*mSliceHeight*mStride/2+mYPadding) {
mBuffer = new byte[3*mSliceHeight*mStride/2+mYPadding];
}
if (!mPlanar) {
if (mSliceHeight==mHeight && mStride==mWidth) {
// Swaps U and V
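// (NV21 interleaves chroma as V,U while the semi-planar encoder formats expect U,V,
// so each pair is swapped in place, using mBuffer[0] as scratch space)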
if (!mPanesReversed) {
for (int i = mSize; i < mSize+mSize/2; i += 2) {
mBuffer[0] = data[i+1];
data[i+1] = data[i];
data[i] = mBuffer[0];
}
}
if (mYPadding>0) {
System.arraycopy(data, 0, mBuffer, 0, mSize);
System.arraycopy(data, mSize, mBuffer, mSize+mYPadding, mSize/2);
return mBuffer;
}
return data;
}
} else {
if (mSliceHeight==mHeight && mStride==mWidth) {
// De-interleave U and V
if (!mPanesReversed) {
for (int i = 0; i < mSize/4; i+=1) {
mBuffer[i] = data[mSize+2*i+1];
mBuffer[mSize/4+i] = data[mSize+2*i];
}
} else {
for (int i = 0; i < mSize/4; i+=1) {
mBuffer[i] = data[mSize+2*i];
mBuffer[mSize/4+i] = data[mSize+2*i+1];
}
}
if (mYPadding == 0) {
System.arraycopy(mBuffer, 0, data, mSize, mSize/2);
} else {
System.arraycopy(data, 0, mBuffer, 0, mSize);
System.arraycopy(mBuffer, 0, mBuffer, mSize+mYPadding, mSize/2);
return mBuffer;
}
return data;
}
}
return data;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.mp4;
import java.io.FileNotFoundException;
import java.io.IOException;
import android.util.Base64;
import android.util.Log;
/**
* Finds SPS & PPS parameters in mp4 file.
*/
public class MP4Config {
public final static String TAG = "MP4Config";
private MP4Parser mp4Parser;
private String mProfilLevel, mPPS, mSPS;
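/*
 * Illustrative usage sketch (not part of the original sources); the file path
 * is hypothetical:
 *
 *   MP4Config config = new MP4Config("/sdcard/test.mp4");
 *   String sps = config.getB64SPS(); // Base64 SPS, e.g. for an SDP "sprop-parameter-sets" field
 *   String pps = config.getB64PPS();
 */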
public MP4Config(String profil, String sps, String pps) {
mProfilLevel = profil;
mPPS = pps;
mSPS = sps;
}
public MP4Config(String sps, String pps) {
mPPS = pps;
mSPS = sps;
mProfilLevel = MP4Parser.toHexString(Base64.decode(sps, Base64.NO_WRAP),1,3);
}
public MP4Config(byte[] sps, byte[] pps) {
mPPS = Base64.encodeToString(pps, 0, pps.length, Base64.NO_WRAP);
mSPS = Base64.encodeToString(sps, 0, sps.length, Base64.NO_WRAP);
mProfilLevel = MP4Parser.toHexString(sps,1,3);
}
/**
* Finds SPS & PPS parameters inside a .mp4.
* @param path Path to the file to analyze
* @throws IOException
* @throws FileNotFoundException
*/
public MP4Config (String path) throws IOException, FileNotFoundException {
StsdBox stsdBox;
// We open the mp4 file and parse it
try {
mp4Parser = MP4Parser.parse(path);
} catch (IOException ignore) {
// Maybe enough of the file has been parsed and we can get the stsd box
}
// We find the stsdBox
stsdBox = mp4Parser.getStsdBox();
mPPS = stsdBox.getB64PPS();
mSPS = stsdBox.getB64SPS();
mProfilLevel = stsdBox.getProfileLevel();
mp4Parser.close();
}
public String getProfileLevel() {
return mProfilLevel;
}
public String getB64PPS() {
Log.d(TAG, "PPS: "+mPPS);
return mPPS;
}
public String getB64SPS() {
Log.d(TAG, "SPS: "+mSPS);
return mSPS;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.mp4;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.HashMap;
import android.util.Base64;
import android.util.Log;
/**
* Parse an mp4 file.
* An mp4 file contains a tree where each node has a name and a size.
* This class is used by H264Stream.java to determine the SPS and PPS parameters of a short video recorded by the phone.
*/
public class MP4Parser {
private static final String TAG = "MP4Parser";
private HashMap<String, Long> mBoxes = new HashMap<>();
private final RandomAccessFile mFile;
private long mPos = 0;
/** Parses the mp4 file. **/
public static MP4Parser parse(String path) throws IOException {
return new MP4Parser(path);
}
private MP4Parser(final String path) throws IOException, FileNotFoundException {
mFile = new RandomAccessFile(new File(path), "r");
try {
parse("",mFile.length());
} catch (Exception e) {
e.printStackTrace();
throw new IOException("Parse error: malformed mp4 file");
}
}
public void close() {
try {
mFile.close();
} catch (Exception ignore) {}
}
public long getBoxPos(String box) throws IOException {
Long r = mBoxes.get(box);
if (r==null) throw new IOException("Box not found: "+box);
return mBoxes.get(box);
}
public StsdBox getStsdBox() throws IOException {
try {
return new StsdBox(mFile,getBoxPos("/moov/trak/mdia/minf/stbl/stsd"));
} catch (IOException e) {
throw new IOException("stsd box could not be found");
}
}
private void parse(String path, long len) throws IOException {
ByteBuffer byteBuffer;
long sum = 0, newlen = 0;
byte[] buffer = new byte[8];
String name = "";
if(!path.equals("")) mBoxes.put(path, mPos-8);
while (sum<len) {
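// Each atom starts with an 8-byte header: a 32-bit big-endian size followed by a
// 4-character type name; a size field of 1 means a 64-bit size follows the name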
mFile.read(buffer,0,8);
mPos += 8; sum += 8;
if (validBoxName(buffer)) {
name = new String(buffer,4,4);
if (buffer[3] == 1) {
// 64 bits atom size
mFile.read(buffer,0,8);
mPos += 8; sum += 8;
byteBuffer = ByteBuffer.wrap(buffer,0,8);
newlen = byteBuffer.getLong()-16;
} else {
// 32 bits atom size
byteBuffer = ByteBuffer.wrap(buffer,0,4);
newlen = byteBuffer.getInt()-8;
}
// 1061109559+8 corresponds to "????" in ASCII; the HTC Desire S seems to write that sometimes, and other phones may too
// A "wide" atom would produce newlen == 0, and we shouldn't throw an exception because of that
if (newlen < 0 || newlen == 1061109559) throw new IOException();
Log.d(TAG, "Atom -> name: "+name+" position: "+mPos+", length: "+newlen);
sum += newlen;
parse(path+'/'+name,newlen);
}
else {
if( len < 8){
mFile.seek(mFile.getFilePointer() - 8 + len);
sum += len-8;
} else {
int skipped = mFile.skipBytes((int)(len-8));
if (skipped < ((int)(len-8))) {
throw new IOException();
}
mPos += len-8;
sum += len-8;
}
}
}
}
private boolean validBoxName(byte[] buffer) {
for (int i=0;i<4;i++) {
// If the next 4 bytes are neither lowercase letters nor numbers
if ((buffer[i+4]< 'a' || buffer[i+4]>'z') && (buffer[i+4]<'0'|| buffer[i+4]>'9') ) return false;
}
return true;
}
static String toHexString(byte[] buffer,int start, int len) {
String c;
StringBuilder s = new StringBuilder();
for (int i=start;i<start+len;i++) {
c = Integer.toHexString(buffer[i]&0xFF);
s.append( c.length()<2 ? "0"+c : c );
}
return s.toString();
}
}
class StsdBox {
private RandomAccessFile fis;
private byte[] buffer = new byte[4];
private long pos = 0;
private byte[] pps;
private byte[] sps;
private int spsLength, ppsLength;
/** Parses the stsd box in an mp4 file.
* fis: proper mp4 file
* pos: stsd box's position in the file
*/
public StsdBox (RandomAccessFile fis, long pos) {
this.fis = fis;
this.pos = pos;
findBoxAvcc();
findSPSandPPS();
}
public String getProfileLevel() {
return MP4Parser.toHexString(sps,1,3);
}
public String getB64PPS() {
return Base64.encodeToString(pps, 0, ppsLength, Base64.NO_WRAP);
}
public String getB64SPS() {
return Base64.encodeToString(sps, 0, spsLength, Base64.NO_WRAP);
}
private boolean findSPSandPPS() {
/*
* SPS and PPS parameters are stored in the avcC box
* You may find really useful information about this box
* in the document ISO-IEC 14496-15, part 5.2.4.1.1
* The box's structure is described there
* <pre>
* aligned(8) class AVCDecoderConfigurationRecord {
* unsigned int(8) configurationVersion = 1;
* unsigned int(8) AVCProfileIndication;
* unsigned int(8) profile_compatibility;
* unsigned int(8) AVCLevelIndication;
* bit(6) reserved = ‘111111’b;
* unsigned int(2) lengthSizeMinusOne;
* bit(3) reserved = ‘111’b;
* unsigned int(5) numOfSequenceParameterSets;
* for (i=0; i< numOfSequenceParameterSets; i++) {
* unsigned int(16) sequenceParameterSetLength ;
* bit(8*sequenceParameterSetLength) sequenceParameterSetNALUnit;
* }
* unsigned int(8) numOfPictureParameterSets;
* for (i=0; i< numOfPictureParameterSets; i++) {
* unsigned int(16) pictureParameterSetLength;
* bit(8*pictureParameterSetLength) pictureParameterSetNALUnit;
* }
* }
* </pre>
*/
try {
// TODO: Here we assume that numOfSequenceParameterSets = 1, numOfPictureParameterSets = 1 !
// Here we extract the SPS parameter
fis.skipBytes(7);
spsLength = 0xFF&fis.readByte();
sps = new byte[spsLength];
fis.read(sps,0,spsLength);
// Here we extract the PPS parameter
fis.skipBytes(2);
ppsLength = 0xFF&fis.readByte();
pps = new byte[ppsLength];
fis.read(pps,0,ppsLength);
} catch (IOException e) {
return false;
}
return true;
}
private boolean findBoxAvcc() {
try {
fis.seek(pos+8);
while (true) {
while (fis.read() != 'a');
fis.read(buffer,0,3);
if (buffer[0] == 'v' && buffer[1] == 'c' && buffer[2] == 'C') break;
}
} catch (IOException e) {
return false;
}
return true;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtcp;
import static net.majorkernelpanic.streaming.rtp.RtpSocket.TRANSPORT_TCP;
import static net.majorkernelpanic.streaming.rtp.RtpSocket.TRANSPORT_UDP;
import java.io.IOException;
import java.io.OutputStream;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;
import java.nio.channels.IllegalSelectorException;
import android.os.SystemClock;
import android.util.Log;
/**
* Implementation of Sender Report RTCP packets.
*/
public class SenderReport {
public static final int MTU = 1500;
private static final int PACKET_LENGTH = 28;
private MulticastSocket usock;
private DatagramPacket upack;
private int mTransport;
private OutputStream mOutputStream = null;
private byte[] mBuffer = new byte[MTU];
private int mSSRC, mPort = -1;
private int mOctetCount = 0, mPacketCount = 0;
private long interval, delta, now, oldnow;
private byte mTcpHeader[];
public SenderReport(int ssrc) throws IOException {
super();
this.mSSRC = ssrc;
}
public SenderReport() {
mTransport = TRANSPORT_UDP;
mTcpHeader = new byte[] {'$',0,0,PACKET_LENGTH};
/* Version(2) Padding(0) */
/* ^ ^ PT = 0 */
/* | | ^ */
/* | -------- | */
/* | |--------------------- */
/* | || */
/* | || */
mBuffer[0] = (byte) Integer.parseInt("10000000",2);
/* Packet Type PT */
mBuffer[1] = (byte) 200;
/* Byte 2,3 -> Length */
setLong(PACKET_LENGTH/4-1, 2, 4);
/* Byte 4,5,6,7 -> SSRC */
/* Byte 8,9,10,11 -> NTP timestamp hb */
/* Byte 12,13,14,15 -> NTP timestamp lb */
/* Byte 16,17,18,19 -> RTP timestamp */
/* Byte 20,21,22,23 -> packet count */
/* Byte 24,25,26,27 -> octet count */
try {
usock = new MulticastSocket();
} catch (IOException e) {
// Very unlikely to happen. Means that all UDP ports are already being used
throw new RuntimeException(e.getMessage());
}
upack = new DatagramPacket(mBuffer, 1);
// By default we send one report every 3 seconds
interval = 3000;
}
public void close() {
usock.close();
}
/**
* Sets the temporal interval between two RTCP Sender Reports.
* Default interval is set to 3 seconds.
* Set 0 to disable RTCP.
* @param interval The interval in milliseconds
*/
public void setInterval(long interval) {
this.interval = interval;
}
/**
* Updates the number of packets sent, and the total amount of data sent.
* @param length The length of the packet
* @param rtpts
* The RTP timestamp.
* @throws IOException
**/
public void update(int length, long rtpts) throws IOException {
mPacketCount += 1;
mOctetCount += length;
setLong(mPacketCount, 20, 24);
setLong(mOctetCount, 24, 28);
now = SystemClock.elapsedRealtime();
delta += oldnow != 0 ? now-oldnow : 0;
oldnow = now;
if (interval>0 && delta>=interval) {
// We send a Sender Report
send(System.nanoTime(), rtpts);
delta = 0;
}
}
public void setSSRC(int ssrc) {
this.mSSRC = ssrc;
setLong(ssrc,4,8);
mPacketCount = 0;
mOctetCount = 0;
setLong(mPacketCount, 20, 24);
setLong(mOctetCount, 24, 28);
}
public void setDestination(InetAddress dest, int dport) {
mTransport = TRANSPORT_UDP;
mPort = dport;
upack.setPort(dport);
upack.setAddress(dest);
}
/**
* If TCP is used as the transport protocol for the RTP session,
* the output stream to which RTP packets will be written to must
* be specified with this method.
*/
public void setOutputStream(OutputStream os, byte channelIdentifier) {
mTransport = TRANSPORT_TCP;
mOutputStream = os;
mTcpHeader[1] = channelIdentifier;
}
public int getPort() {
return mPort;
}
public int getLocalPort() {
return usock.getLocalPort();
}
public int getSSRC() {
return mSSRC;
}
/**
* Resets the reports (total number of bytes sent, number of packets sent, etc.)
*/
public void reset() {
mPacketCount = 0;
mOctetCount = 0;
setLong(mPacketCount, 20, 24);
setLong(mOctetCount, 24, 28);
delta = now = oldnow = 0;
}
private void setLong(long n, int begin, int end) {
for (end--; end >= begin; end--) {
mBuffer[end] = (byte) (n % 256);
n >>= 8;
}
}
/**
* Sends the RTCP packet over the network.
*
* @param ntpts
* the NTP timestamp.
* @param rtpts
* the RTP timestamp.
*/
private void send(long ntpts, long rtpts) throws IOException {
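// The 64-bit NTP timestamp is split into 32 bits of seconds (hb) and a 32-bit
// fraction (lb), computed as the remaining nanoseconds scaled by 2^32/10^9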
long hb = ntpts/1000000000;
long lb = ( ( ntpts - hb*1000000000 ) * 4294967296L )/1000000000;
setLong(hb, 8, 12);
setLong(lb, 12, 16);
setLong(rtpts, 16, 20);
if (mTransport == TRANSPORT_UDP) {
upack.setLength(PACKET_LENGTH);
usock.send(upack);
} else {
synchronized (mOutputStream) {
try {
mOutputStream.write(mTcpHeader);
mOutputStream.write(mBuffer, 0, PACKET_LENGTH);
} catch (Exception e) {}
}
}
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtp;
import java.io.IOException;
import net.majorkernelpanic.streaming.audio.AACStream;
import android.os.SystemClock;
import android.util.Log;
/**
*
* RFC 3640.
*
* This packetizer must be fed with an InputStream containing ADTS AAC.
* AAC will basically be rewrapped in an RTP stream and sent over the network.
* This packetizer only implements the aac-hbr mode (High Bit-rate AAC) and
* each packet only carry a single and complete AAC access unit.
*
*/
public class AACADTSPacketizer extends AbstractPacketizer implements Runnable {
private final static String TAG = "AACADTSPacketizer";
private Thread t;
private int samplingRate = 8000;
public AACADTSPacketizer() {
super();
}
public void start() {
if (t==null) {
t = new Thread(this);
t.start();
}
}
public void stop() {
if (t != null) {
try {
is.close();
} catch (IOException ignore) {}
t.interrupt();
try {
t.join();
} catch (InterruptedException e) {}
t = null;
}
}
public void setSamplingRate(int samplingRate) {
this.samplingRate = samplingRate;
socket.setClockFrequency(samplingRate);
}
public void run() {
Log.d(TAG,"AAC ADTS packetizer started !");
// "A packet SHALL carry either one or more complete Access Units, or a
// single fragment of an Access Unit. Fragments of the same Access Unit
// have the same time stamp but different RTP sequence numbers. The
// marker bit in the RTP header is 1 on the last fragment of an Access
// Unit, and 0 on all other fragments." RFC 3640
// ADTS header fields that we need to parse
boolean protection;
int frameLength, sum, length, nbau, nbpk, samplingRateIndex, profile;
long oldtime = SystemClock.elapsedRealtime(), now = oldtime;
byte[] header = new byte[8];
try {
while (!Thread.interrupted()) {
// Synchronisation: an ADTS packet starts with 12 bits set to 1 (0xFFF)
while (true) {
if ( (is.read()&0xFF) == 0xFF ) {
header[1] = (byte) is.read();
if ( (header[1]&0xF0) == 0xF0) break;
}
}
// Parse the ADTS header (ADTS packets start with a 7 or 9 byte long header)
fill(header, 2, 5);
// The protection bit indicates whether or not the header contains the two extra bytes
protection = (header[1]&0x01)>0;
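// frame_length is a 13-bit field spanning ADTS header bytes 3 to 5 (2 bits, then 8 bits, then 3 bits);
// subtracting the header size leaves the length of the raw AAC payload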
frameLength = (header[3]&0x03) << 11 |
(header[4]&0xFF) << 3 |
(header[5]&0xFF) >> 5 ;
frameLength -= (protection ? 7 : 9);
// Number of AAC frames in the ADTS frame
nbau = (header[6]&0x03) + 1;
// The number of RTP packets that will be sent for this ADTS frame
nbpk = frameLength/MAXPACKETSIZE + 1;
// Read the CRC if present
if (!protection) is.read(header,0,2);
samplingRate = AACStream.AUDIO_SAMPLING_RATES[(header[2]&0x3C) >> 2];
profile = ( (header[2]&0xC0) >> 6 ) + 1 ;
// We update the RTP timestamp
ts += 1024L*1000000000L/samplingRate; //stats.average();
//Log.d(TAG,"frameLength: "+frameLength+" protection: "+protection+" p: "+profile+" sr: "+samplingRate);
sum = 0;
while (sum<frameLength) {
buffer = socket.requestBuffer();
socket.updateTimestamp(ts);
// Read frame
if (frameLength-sum > MAXPACKETSIZE-rtphl-4) {
length = MAXPACKETSIZE-rtphl-4;
}
else {
length = frameLength-sum;
socket.markNextPacket();
}
sum += length;
fill(buffer, rtphl+4, length);
// AU-headers-length field: contains the size in bits of an AU-header
// 13+3 = 16 bits -> 13bits for AU-size and 3bits for AU-Index / AU-Index-delta
// 13 bits will be enough because ADTS uses 13 bits for frame length
buffer[rtphl] = 0;
buffer[rtphl+1] = 0x10;
// AU-size
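// (the 13-bit frame length fills the top 13 bits of the two AU-header bytes:
// its 8 MSBs go in the first byte, the remaining 5 bits are left-aligned in the second)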
buffer[rtphl+2] = (byte) (frameLength>>5);
buffer[rtphl+3] = (byte) (frameLength<<3);
// AU-Index
buffer[rtphl+3] &= 0xF8;
buffer[rtphl+3] |= 0x00;
send(rtphl+4+length);
}
}
} catch (IOException e) {
// Ignore
} catch (ArrayIndexOutOfBoundsException e) {
Log.e(TAG,"ArrayIndexOutOfBoundsException: "+(e.getMessage()!=null?e.getMessage():"unknown error"));
e.printStackTrace();
} catch (InterruptedException ignore) {}
Log.d(TAG,"AAC ADTS packetizer stopped !");
}
private int fill(byte[] buffer, int offset,int length) throws IOException {
int sum = 0, len;
while (sum<length) {
len = is.read(buffer, offset+sum, length-sum);
if (len<0) {
throw new IOException("End of stream");
}
else sum+=len;
}
return sum;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtp;
import java.io.IOException;
import android.annotation.SuppressLint;
import android.media.MediaCodec.BufferInfo;
import android.os.SystemClock;
import android.util.Log;
/**
* RFC 3640.
*
* Encapsulates AAC Access Units in RTP packets as specified in the RFC 3640.
* This packetizer is used by the AACStream class in conjunction with the
* MediaCodec API introduced in Android 4.1 (API Level 16).
*
*/
@SuppressLint("NewApi")
public class AACLATMPacketizer extends AbstractPacketizer implements Runnable {
private final static String TAG = "AACLATMPacketizer";
private Thread t;
public AACLATMPacketizer() {
super();
socket.setCacheSize(0);
}
public void start() {
if (t==null) {
t = new Thread(this);
t.start();
}
}
public void stop() {
if (t != null) {
try {
is.close();
} catch (IOException ignore) {}
t.interrupt();
try {
t.join();
} catch (InterruptedException e) {}
t = null;
}
}
public void setSamplingRate(int samplingRate) {
socket.setClockFrequency(samplingRate);
}
@SuppressLint("NewApi")
public void run() {
Log.d(TAG,"AAC LATM packetizer started !");
int length = 0;
long oldts;
BufferInfo bufferInfo;
try {
while (!Thread.interrupted()) {
buffer = socket.requestBuffer();
length = is.read(buffer, rtphl+4, MAXPACKETSIZE-(rtphl+4));
if (length>0) {
bufferInfo = ((MediaCodecInputStream)is).getLastBufferInfo();
//Log.d(TAG,"length: "+length+" ts: "+bufferInfo.presentationTimeUs);
oldts = ts;
ts = bufferInfo.presentationTimeUs*1000;
// Seems to happen sometimes
if (oldts>ts) {
socket.commitBuffer();
continue;
}
socket.markNextPacket();
socket.updateTimestamp(ts);
// AU-headers-length field: contains the size in bits of an AU-header
// 13+3 = 16 bits -> 13bits for AU-size and 3bits for AU-Index / AU-Index-delta
// 13 bits will be enough because ADTS uses 13 bits for frame length
buffer[rtphl] = 0;
buffer[rtphl+1] = 0x10;
// AU-size
buffer[rtphl+2] = (byte) (length>>5);
buffer[rtphl+3] = (byte) (length<<3);
// AU-Index
buffer[rtphl+3] &= 0xF8;
buffer[rtphl+3] |= 0x00;
send(rtphl+length+4);
} else {
socket.commitBuffer();
}
}
} catch (IOException e) {
} catch (ArrayIndexOutOfBoundsException e) {
Log.e(TAG,"ArrayIndexOutOfBoundsException: "+(e.getMessage()!=null?e.getMessage():"unknown error"));
e.printStackTrace();
} catch (InterruptedException ignore) {}
Log.d(TAG,"AAC LATM packetizer stopped !");
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtp;
import java.io.IOException;
import android.util.Log;
/**
*
* RFC 3267.
*
* AMR Streaming over RTP.
*
* Must be fed with an InputStream containing raw AMR-NB.
* The stream must begin with a 6-byte header: "#!AMR\n"; it will be skipped.
*
*/
public class AMRNBPacketizer extends AbstractPacketizer implements Runnable {
public final static String TAG = "AMRNBPacketizer";
private final int AMR_HEADER_LENGTH = 6; // "#!AMR\n"
private static final int AMR_FRAME_HEADER_LENGTH = 1; // Each frame has a short header
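// Payload size in bits for each AMR-NB frame type 0-7 (4.75 up to 12.2 kbit/s), per RFC 3267;
// the payload length in bytes is (bits+7)/8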
private static final int[] sFrameBits = {95, 103, 118, 134, 148, 159, 204, 244};
private int samplingRate = 8000;
private Thread t;
public AMRNBPacketizer() {
super();
socket.setClockFrequency(samplingRate);
}
public void start() {
if (t==null) {
t = new Thread(this);
t.start();
}
}
public void stop() {
if (t != null) {
try {
is.close();
} catch (IOException ignore) {}
t.interrupt();
try {
t.join();
} catch (InterruptedException e) {}
t = null;
}
}
public void run() {
int frameLength, frameType;
long now = System.nanoTime(), oldtime = now;
byte[] header = new byte[AMR_HEADER_LENGTH];
try {
// Skip raw AMR header
fill(header,0,AMR_HEADER_LENGTH);
if (header[5] != '\n') {
Log.e(TAG,"Bad header ! AMR not correcty supported by the phone !");
return;
}
while (!Thread.interrupted()) {
buffer = socket.requestBuffer();
buffer[rtphl] = (byte) 0xF0;
// First we read the frame header
fill(buffer, rtphl+1,AMR_FRAME_HEADER_LENGTH);
// Then we calculate the frame payload length
frameType = (Math.abs(buffer[rtphl + 1]) >> 3) & 0x0f;
frameLength = (sFrameBits[frameType]+7)/8;
// And we read the payload
fill(buffer, rtphl+2,frameLength);
//Log.d(TAG,"Frame length: "+frameLength+" frameType: "+frameType);
// RFC 3267 Page 14: "For AMR, the sampling frequency is 8 kHz"
// FIXME: Is this really always the case ??
ts += 160L*1000000000L/samplingRate; //stats.average();
socket.updateTimestamp(ts);
socket.markNextPacket();
//Log.d(TAG,"expected: "+ expected + " measured: "+measured);
send(rtphl+1+AMR_FRAME_HEADER_LENGTH+frameLength);
}
} catch (IOException e) {
} catch (InterruptedException e) {}
Log.d(TAG,"AMR packetizer stopped !");
}
private int fill(byte[] buffer, int offset,int length) throws IOException {
int sum = 0, len;
while (sum<length) {
len = is.read(buffer, offset+sum, length-sum);
if (len<0) {
throw new IOException("End of stream");
}
else sum+=len;
}
return sum;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtp;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.util.Random;
import net.majorkernelpanic.streaming.rtcp.SenderReport;
/**
*
* Each packetizer inherits from this one and therefore uses RTP and UDP.
*
*/
abstract public class AbstractPacketizer {
protected static final int rtphl = RtpSocket.RTP_HEADER_LENGTH;
// Maximum size of RTP packets
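// (the MTU minus the 20-byte IP header and the 8-byte UDP header)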
protected final static int MAXPACKETSIZE = RtpSocket.MTU-28;
protected RtpSocket socket = null;
protected InputStream is = null;
protected byte[] buffer;
protected long ts = 0;
public AbstractPacketizer() {
int ssrc = new Random().nextInt();
ts = new Random().nextInt();
socket = new RtpSocket();
socket.setSSRC(ssrc);
}
public RtpSocket getRtpSocket() {
return socket;
}
public void setSSRC(int ssrc) {
socket.setSSRC(ssrc);
}
public int getSSRC() {
return socket.getSSRC();
}
public void setInputStream(InputStream is) {
this.is = is;
}
public void setTimeToLive(int ttl) throws IOException {
socket.setTimeToLive(ttl);
}
/**
* Sets the destination of the stream.
* @param dest The destination address of the stream
* @param rtpPort Destination port that will be used for RTP
* @param rtcpPort Destination port that will be used for RTCP
*/
public void setDestination(InetAddress dest, int rtpPort, int rtcpPort) {
socket.setDestination(dest, rtpPort, rtcpPort);
}
/** Starts the packetizer. */
public abstract void start();
/** Stops the packetizer. */
public abstract void stop();
/** Updates data for RTCP SR and sends the packet. */
protected void send(int length) throws IOException {
socket.commitBuffer(length);
}
/** For debugging purposes. */
protected static String printBuffer(byte[] buffer, int start,int end) {
String str = "";
for (int i=start;i<end;i++) str+=","+Integer.toHexString(buffer[i]&0xFF);
return str;
}
/** Used in packetizers to estimate timestamps in RTP packets. */
protected static class Statistics {
public final static String TAG = "Statistics";
private int count=700, c = 0;
private float m = 0, q = 0;
private long elapsed = 0;
private long start = 0;
private long duration = 0;
private long period = 10000000000L;
private boolean initoffset = false;
public Statistics() {}
public Statistics(int count, int period) {
this.count = count;
this.period = period;
}
public void reset() {
initoffset = false;
q = 0; m = 0; c = 0;
elapsed = 0;
start = 0;
duration = 0;
}
public void push(long value) {
elapsed += value;
if (elapsed>period) {
elapsed = 0;
long now = System.nanoTime();
if (!initoffset || (now - start < 0)) {
start = now;
duration = 0;
initoffset = true;
}
// Prevents drifting issues by comparing the real duration of the
// stream with the sum of all temporal lengths of RTP packets.
value += (now - start) - duration;
//Log.d(TAG, "sum1: "+duration/1000000+" sum2: "+(now-start)/1000000+" drift: "+((now-start)-duration)/1000000+" v: "+value/1000000);
}
if (c<5) {
// We ignore the first few measured values because they may not be accurate
c++;
m = value;
} else {
m = (m*q+value)/(q+1);
if (q<count) q++;
}
}
public long average() {
long l = (long)m;
duration += l;
return l;
}
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtp;
import java.io.IOException;
import android.util.Log;
/**
* RFC 4629.
*
* H.263 Streaming over RTP.
*
* Must be fed with an InputStream containing H.263 frames.
* The stream must start with an MPEG-4 or 3GPP header; it will be skipped.
*
*/
public class H263Packetizer extends AbstractPacketizer implements Runnable {
public final static String TAG = "H263Packetizer";
private Statistics stats = new Statistics();
private Thread t;
public H263Packetizer() {
super();
socket.setClockFrequency(90000);
}
public void start() {
if (t==null) {
t = new Thread(this);
t.start();
}
}
public void stop() {
if (t != null) {
try {
is.close();
} catch (IOException ignore) {}
t.interrupt();
try {
t.join();
} catch (InterruptedException e) {}
t = null;
}
}
public void run() {
long time, duration = 0;
int i = 0, j = 0, tr;
boolean firstFragment = true;
byte[] nextBuffer;
stats.reset();
try {
while (!Thread.interrupted()) {
if (j==0) buffer = socket.requestBuffer();
socket.updateTimestamp(ts);
// Each packet we send has a two byte long header (See section 5.1 of RFC 4629)
buffer[rtphl] = 0;
buffer[rtphl+1] = 0;
time = System.nanoTime();
if (fill(rtphl+j+2,MAXPACKETSIZE-rtphl-j-2)<0) return;
duration += System.nanoTime() - time;
j = 0;
// Each h263 frame starts with: 0000 0000 0000 0000 1000 00??
// Here we search where the next frame begins in the bit stream
for (i=rtphl+2;i<MAXPACKETSIZE-1;i++) {
if (buffer[i]==0 && buffer[i+1]==0 && (buffer[i+2]&0xFC)==0x80) {
j=i;
break;
}
}
// Parse temporal reference
tr = (buffer[i+2]&0x03)<<6 | (buffer[i+3]&0xFF)>>2;
//Log.d(TAG,"j: "+j+" buffer: "+printBuffer(rtphl, rtphl+5)+" tr: "+tr);
if (firstFragment) {
// This is the first fragment of the frame -> header is set to 0x0400
buffer[rtphl] = 4;
firstFragment = false;
} else {
buffer[rtphl] = 0;
}
if (j>0) {
// We have found the end of the frame
stats.push(duration);
ts+= stats.average(); duration = 0;
//Log.d(TAG,"End of frame ! duration: "+stats.average());
// The last fragment of a frame has to be marked
socket.markNextPacket();
send(j);
nextBuffer = socket.requestBuffer();
System.arraycopy(buffer,j+2,nextBuffer,rtphl+2,MAXPACKETSIZE-j-2);
buffer = nextBuffer;
j = MAXPACKETSIZE-j-2;
firstFragment = true;
} else {
// We have not found the beginning of another frame
// The whole packet is a fragment of a frame
send(MAXPACKETSIZE);
}
}
} catch (IOException e) {
} catch (InterruptedException e) {}
Log.d(TAG,"H263 Packetizer stopped !");
}
private int fill(int offset,int length) throws IOException {
int sum = 0, len;
while (sum<length) {
len = is.read(buffer, offset+sum, length-sum);
if (len<0) {
throw new IOException("End of stream");
}
else sum+=len;
}
return sum;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtp;
import java.io.IOException;
import android.annotation.SuppressLint;
import android.util.Log;
/**
*
* RFC 3984.
*
* H.264 streaming over RTP.
*
* Must be fed with an InputStream containing H.264 NAL units preceded by their length (4 bytes).
* The stream must start with an MPEG-4 or 3GPP header; it will be skipped.
*
*/
public class H264Packetizer extends AbstractPacketizer implements Runnable {
public final static String TAG = "H264Packetizer";
private Thread t = null;
private int naluLength = 0;
private long delay = 0, oldtime = 0;
private Statistics stats = new Statistics();
private byte[] sps = null, pps = null, stapa = null;
byte[] header = new byte[5];
private int count = 0;
private int streamType = 1;
public H264Packetizer() {
super();
socket.setClockFrequency(90000);
}
public void start() {
if (t == null) {
t = new Thread(this);
t.start();
}
}
public void stop() {
if (t != null) {
try {
is.close();
} catch (IOException e) {}
t.interrupt();
try {
t.join();
} catch (InterruptedException e) {}
t = null;
}
}
public void setStreamParameters(byte[] pps, byte[] sps) {
this.pps = pps;
this.sps = sps;
// A STAP-A NAL (NAL type 24) containing the sps and pps of the stream
if (pps != null && sps != null) {
// STAP-A NAL header + NALU 1 (SPS) size + NALU 2 (PPS) size = 5 bytes
stapa = new byte[sps.length + pps.length + 5];
// STAP-A NAL header is 24
stapa[0] = 24;
// Write NALU 1 size into the array (NALU 1 is the SPS).
stapa[1] = (byte) (sps.length >> 8);
stapa[2] = (byte) (sps.length & 0xFF);
// Write NALU 2 size into the array (NALU 2 is the PPS).
stapa[sps.length + 3] = (byte) (pps.length >> 8);
stapa[sps.length + 4] = (byte) (pps.length & 0xFF);
// Write NALU 1 into the array, then write NALU 2 into the array.
System.arraycopy(sps, 0, stapa, 3, sps.length);
System.arraycopy(pps, 0, stapa, 5 + sps.length, pps.length);
}
}
public void run() {
long duration = 0;
Log.d(TAG,"H264 packetizer started !");
stats.reset();
count = 0;
if (is instanceof MediaCodecInputStream) {
streamType = 1;
socket.setCacheSize(0);
} else {
streamType = 0;
socket.setCacheSize(400);
}
try {
while (!Thread.interrupted()) {
oldtime = System.nanoTime();
// We read NAL units from the input stream and send them
send();
// We measure how long it took to receive NAL units from the phone
duration = System.nanoTime() - oldtime;
stats.push(duration);
// Computes the average duration of a NAL unit
delay = stats.average();
//Log.d(TAG,"duration: "+duration/1000000+" delay: "+delay/1000000);
}
} catch (IOException e) {
} catch (InterruptedException e) {}
Log.d(TAG,"H264 packetizer stopped !");
}
/**
* Reads a NAL unit in the FIFO and sends it.
* If it is too big, we split it in FU-A units (RFC 3984).
*/
@SuppressLint("NewApi")
private void send() throws IOException, InterruptedException {
int sum = 1, len = 0, type;
if (streamType == 0) {
// NAL units are preceded by their length; we parse the length
fill(header,0,5);
ts += delay;
naluLength = header[3]&0xFF | (header[2]&0xFF)<<8 | (header[1]&0xFF)<<16 | (header[0]&0xFF)<<24;
if (naluLength>100000 || naluLength<0) resync();
} else if (streamType == 1) {
// NAL units are preceded by 0x00000001
fill(header,0,5);
ts = ((MediaCodecInputStream)is).getLastBufferInfo().presentationTimeUs*1000L;
//ts += delay;
naluLength = is.available()+1;
if (!(header[0]==0 && header[1]==0 && header[2]==0)) {
// Turns out, the NAL units are not preceded by 0x00000001
Log.e(TAG, "NAL units are not preceded by 0x00000001");
streamType = 2;
return;
}
} else {
// Nothing precedes the NAL units
fill(header,0,1);
header[4] = header[0];
ts = ((MediaCodecInputStream)is).getLastBufferInfo().presentationTimeUs*1000L;
//ts += delay;
naluLength = is.available()+1;
}
// Parses the NAL unit type
type = header[4]&0x1F;
// The stream already contains NAL unit type 7 or 8, we don't need
// to add them to the stream ourselves
if (type == 7 || type == 8) {
Log.v(TAG,"SPS or PPS present in the stream.");
count++;
if (count>4) {
sps = null;
pps = null;
}
}
// We send two packets containing NALU type 7 (SPS) and 8 (PPS)
// Those should allow the H264 stream to be decoded even if no SDP was sent to the decoder.
if (type == 5 && sps != null && pps != null) {
buffer = socket.requestBuffer();
socket.markNextPacket();
socket.updateTimestamp(ts);
System.arraycopy(stapa, 0, buffer, rtphl, stapa.length);
super.send(rtphl+stapa.length);
}
//Log.d(TAG,"- Nal unit length: " + naluLength + " delay: "+delay/1000000+" type: "+type);
// Small NAL unit => Single NAL unit
if (naluLength<=MAXPACKETSIZE-rtphl-2) {
buffer = socket.requestBuffer();
buffer[rtphl] = header[4];
len = fill(buffer, rtphl+1, naluLength-1);
socket.updateTimestamp(ts);
socket.markNextPacket();
super.send(naluLength+rtphl);
//Log.d(TAG,"----- Single NAL unit - len:"+len+" delay: "+delay);
}
// Large NAL unit => Split nal unit
else {
// Set FU-A header
header[1] = (byte) (header[4] & 0x1F); // FU header type
header[1] += 0x80; // Start bit
// Set FU-A indicator
header[0] = (byte) ((header[4] & 0x60) & 0xFF); // FU indicator NRI
header[0] += 28;
while (sum < naluLength) {
buffer = socket.requestBuffer();
buffer[rtphl] = header[0];
buffer[rtphl+1] = header[1];
socket.updateTimestamp(ts);
if ((len = fill(buffer, rtphl+2, naluLength-sum > MAXPACKETSIZE-rtphl-2 ? MAXPACKETSIZE-rtphl-2 : naluLength-sum)) < 0) return;
sum += len;
// Last packet before next NAL
if (sum >= naluLength) {
// End bit on
buffer[rtphl+1] += 0x40;
socket.markNextPacket();
}
super.send(len+rtphl+2);
// Switch start bit
header[1] = (byte) (header[1] & 0x7F);
//Log.d(TAG,"----- FU-A unit, sum:"+sum);
}
}
}
private int fill(byte[] buffer, int offset,int length) throws IOException {
int sum = 0, len;
while (sum<length) {
len = is.read(buffer, offset+sum, length-sum);
if (len<0) {
throw new IOException("End of stream");
}
else sum+=len;
}
return sum;
}
private void resync() throws IOException {
int type;
Log.e(TAG,"Packetizer out of sync ! Let's try to fix that...(NAL length: "+naluLength+")");
while (true) {
header[0] = header[1];
header[1] = header[2];
header[2] = header[3];
header[3] = header[4];
header[4] = (byte) is.read();
type = header[4]&0x1F;
if (type == 5 || type == 1) {
naluLength = header[3]&0xFF | (header[2]&0xFF)<<8 | (header[1]&0xFF)<<16 | (header[0]&0xFF)<<24;
if (naluLength>0 && naluLength<100000) {
oldtime = System.nanoTime();
Log.e(TAG,"A NAL unit may have been found in the bit stream !");
break;
}
if (naluLength==0) {
Log.e(TAG,"NAL unit with NULL size found...");
// Bytes are signed in Java and must be masked before comparing against 0xFF
} else if ((header[3]&0xFF)==0xFF && (header[2]&0xFF)==0xFF && (header[1]&0xFF)==0xFF && (header[0]&0xFF)==0xFF) {
Log.e(TAG,"NAL unit with 0xFFFFFFFF size found...");
}
}
}
}
}
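For readers unfamiliar with RFC 3984 fragmentation, the following standalone sketch (not part of the project; the NAL header value is an arbitrary example) reproduces the FU-A indicator/header byte arithmetic used in H264Packetizer.send() above:
// Minimal sketch of the FU-A byte math from H264Packetizer.send() (RFC 3984).
public class FuaSketch {
public static void main(String[] args) {
byte nalHeader = 0x65; // example: NRI = 3, type = 5 (IDR slice)
// FU indicator: keep the NRI bits of the original NAL header, set type to 28 (FU-A)
byte fuIndicator = (byte) ((nalHeader & 0x60) + 28);
// FU header: original NAL type with the start bit (0x80) set for the first fragment
byte fuHeader = (byte) ((nalHeader & 0x1F) | 0x80);
System.out.printf("indicator=0x%02x header=0x%02x%n", fuIndicator, fuHeader);
// Middle fragments clear the start bit; the last fragment sets the end bit (0x40)
fuHeader &= 0x7F;
byte lastFuHeader = (byte) (fuHeader | 0x40);
System.out.printf("middle=0x%02x last=0x%02x%n", fuHeader, lastFuHeader);
}
}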
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtp;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.util.Log;
/**
* An InputStream that uses data from a MediaCodec.
* The purpose of this class is to interface existing RTP packetizers of
* libstreaming with the new MediaCodec API. This class is not thread safe !
*/
@SuppressLint("NewApi")
public class MediaCodecInputStream extends InputStream {
public final String TAG = "MediaCodecInputStream";
private MediaCodec mMediaCodec = null;
private BufferInfo mBufferInfo = new BufferInfo();
private ByteBuffer[] mBuffers = null;
private ByteBuffer mBuffer = null;
private int mIndex = -1;
private boolean mClosed = false;
public MediaFormat mMediaFormat;
public MediaCodecInputStream(MediaCodec mediaCodec) {
mMediaCodec = mediaCodec;
mBuffers = mMediaCodec.getOutputBuffers();
}
@Override
public void close() {
mClosed = true;
}
@Override
public int read() throws IOException {
// Single-byte reads are not supported; use read(byte[], int, int) instead.
return 0;
}
@Override
public int read(byte[] buffer, int offset, int length) throws IOException {
int min = 0;
try {
if (mBuffer==null) {
while (!Thread.interrupted() && !mClosed) {
mIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 500000);
if (mIndex>=0 ){
//Log.d(TAG,"Index: "+mIndex+" Time: "+mBufferInfo.presentationTimeUs+" size: "+mBufferInfo.size);
mBuffer = mBuffers[mIndex];
mBuffer.position(0);
break;
} else if (mIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
mBuffers = mMediaCodec.getOutputBuffers();
} else if (mIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
mMediaFormat = mMediaCodec.getOutputFormat();
Log.i(TAG,mMediaFormat.toString());
} else if (mIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.v(TAG,"No buffer available...");
//return 0;
} else {
Log.e(TAG,"Message: "+mIndex);
//return 0;
}
}
}
if (mClosed) throw new IOException("This InputStream was closed");
min = length < mBufferInfo.size - mBuffer.position() ? length : mBufferInfo.size - mBuffer.position();
mBuffer.get(buffer, offset, min);
if (mBuffer.position()>=mBufferInfo.size) {
mMediaCodec.releaseOutputBuffer(mIndex, false);
mBuffer = null;
}
} catch (RuntimeException e) {
e.printStackTrace();
}
return min;
}
public int available() {
if (mBuffer != null)
return mBufferInfo.size - mBuffer.position();
else
return 0;
}
public BufferInfo getLastBufferInfo() {
return mBufferInfo;
}
}
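A hedged usage sketch (not from the project): `encoder` is assumed to be a configured and started android.media.MediaCodec producing H.264; in libstreaming the stream is normally handed to a packetizer rather than read manually like this:
// Sketch only: drain encoded output through the InputStream interface.
MediaCodecInputStream stream = new MediaCodecInputStream(encoder);
byte[] chunk = new byte[4096];
// read() blocks until the codec produces an output buffer.
int n = stream.read(chunk, 0, chunk.length);
Log.d("Sketch", "got " + n + " encoded bytes, format: " + stream.mMediaFormat);
stream.close();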
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtp;
import java.io.IOException;
import java.io.OutputStream;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import net.majorkernelpanic.streaming.rtcp.SenderReport;
import android.os.SystemClock;
import android.util.Log;
/**
* A basic implementation of an RTP socket.
* It implements a buffering mechanism, relying on a FIFO of buffers and a Thread.
* That way, if a packetizer tries to send many packets too quickly, the FIFO will
* grow and packets will be sent one by one smoothly.
*/
public class RtpSocket implements Runnable {
public static final String TAG = "RtpSocket";
/** Use this to use UDP for the transport protocol. */
public final static int TRANSPORT_UDP = 0x00;
/** Use this to use TCP for the transport protocol. */
public final static int TRANSPORT_TCP = 0x01;
public static final int RTP_HEADER_LENGTH = 12;
public static final int MTU = 1300;
private MulticastSocket mSocket;
private DatagramPacket[] mPackets;
private byte[][] mBuffers;
private long[] mTimestamps;
private SenderReport mReport;
private Semaphore mBufferRequested, mBufferCommitted;
private Thread mThread;
private int mTransport;
private long mCacheSize;
private long mClock = 0;
private long mOldTimestamp = 0;
private int mSsrc, mSeq = 0, mPort = -1;
private int mBufferCount, mBufferIn, mBufferOut;
private int mCount = 0;
private byte mTcpHeader[];
protected OutputStream mOutputStream = null;
private AverageBitrate mAverageBitrate;
/**
* This RTP socket implements a buffering mechanism relying on a FIFO of buffers and a Thread.
*/
public RtpSocket() {
mCacheSize = 0;
mBufferCount = 300; // TODO: readjust that when the FIFO is full
mBuffers = new byte[mBufferCount][];
mPackets = new DatagramPacket[mBufferCount];
mReport = new SenderReport();
mAverageBitrate = new AverageBitrate();
mTransport = TRANSPORT_UDP;
mTcpHeader = new byte[] {'$',0,0,0};
resetFifo();
for (int i=0; i<mBufferCount; i++) {
mBuffers[i] = new byte[MTU];
mPackets[i] = new DatagramPacket(mBuffers[i], 1); // length is overwritten by commitBuffer(int) before sending
/* Version(2) Padding(0) */
/* ^ ^ Extension(0) */
/* | | ^ */
/* | -------- | */
/* | |--------------------- */
/* | || -----------------------> Source Identifier(0) */
/* | || | */
mBuffers[i][0] = (byte) Integer.parseInt("10000000",2);
/* Payload Type */
mBuffers[i][1] = (byte) 96;
/* Byte 2,3 -> Sequence Number */
/* Byte 4,5,6,7 -> Timestamp */
/* Byte 8,9,10,11 -> Sync Source Identifier */
}
try {
mSocket = new MulticastSocket();
} catch (Exception e) {
throw new RuntimeException(e.getMessage());
}
}
private void resetFifo() {
mCount = 0;
mBufferIn = 0;
mBufferOut = 0;
mTimestamps = new long[mBufferCount];
mBufferRequested = new Semaphore(mBufferCount);
mBufferCommitted = new Semaphore(0);
mReport.reset();
mAverageBitrate.reset();
}
/** Closes the underlying socket. */
public void close() {
mSocket.close();
}
/** Sets the SSRC of the stream. */
public void setSSRC(int ssrc) {
this.mSsrc = ssrc;
for (int i=0;i<mBufferCount;i++) {
setLong(mBuffers[i], ssrc,8,12);
}
mReport.setSSRC(mSsrc);
}
/** Returns the SSRC of the stream. */
public int getSSRC() {
return mSsrc;
}
/** Sets the clock frequency of the stream in Hz. */
public void setClockFrequency(long clock) {
mClock = clock;
}
/** Sets the size of the FIFO in ms. */
public void setCacheSize(long cacheSize) {
mCacheSize = cacheSize;
}
/** Sets the Time To Live of the UDP packets. */
public void setTimeToLive(int ttl) throws IOException {
mSocket.setTimeToLive(ttl);
}
/** Sets the destination address to which the packets will be sent. */
public void setDestination(InetAddress dest, int dport, int rtcpPort) {
if (dport != 0 && rtcpPort != 0) {
mTransport = TRANSPORT_UDP;
mPort = dport;
for (int i=0;i<mBufferCount;i++) {
mPackets[i].setPort(dport);
mPackets[i].setAddress(dest);
}
mReport.setDestination(dest, rtcpPort);
}
}
/**
* If TCP is used as the transport protocol for the RTP session,
* the output stream to which RTP packets will be written must
* be specified with this method.
*/
public void setOutputStream(OutputStream outputStream, byte channelIdentifier) {
if (outputStream != null) {
mTransport = TRANSPORT_TCP;
mOutputStream = outputStream;
mTcpHeader[1] = channelIdentifier;
mReport.setOutputStream(outputStream, (byte) (channelIdentifier+1));
}
}
public int getPort() {
return mPort;
}
public int[] getLocalPorts() {
return new int[] {
mSocket.getLocalPort(),
mReport.getLocalPort()
};
}
/**
* Returns an available buffer from the FIFO; it can then be modified.
* Call {@link #commitBuffer(int)} to send it over the network.
* @throws InterruptedException
**/
public byte[] requestBuffer() throws InterruptedException {
mBufferRequested.acquire();
mBuffers[mBufferIn][1] &= 0x7F;
return mBuffers[mBufferIn];
}
/** Puts the buffer back into the FIFO without sending the packet. */
public void commitBuffer() throws IOException {
if (mThread == null) {
mThread = new Thread(this);
mThread.start();
}
if (++mBufferIn>=mBufferCount) mBufferIn = 0;
mBufferCommitted.release();
}
/** Sends the RTP packet over the network. */
public void commitBuffer(int length) throws IOException {
updateSequence();
mPackets[mBufferIn].setLength(length);
mAverageBitrate.push(length);
if (++mBufferIn>=mBufferCount) mBufferIn = 0;
mBufferCommitted.release();
if (mThread == null) {
mThread = new Thread(this);
mThread.start();
}
}
/** Returns an approximation of the bitrate of the RTP stream in bits per second. */
public long getBitrate() {
return mAverageBitrate.average();
}
/** Increments the sequence number. */
private void updateSequence() {
setLong(mBuffers[mBufferIn], ++mSeq, 2, 4);
}
/**
* Overwrites the timestamp in the packet.
* @param timestamp The new timestamp in ns.
**/
public void updateTimestamp(long timestamp) {
mTimestamps[mBufferIn] = timestamp;
setLong(mBuffers[mBufferIn], (timestamp/100L)*(mClock/1000L)/10000L, 4, 8);
}
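/*
 * Note on the conversion above: the RTP timestamp field counts clock ticks, so a
 * timestamp in nanoseconds must be scaled by mClock/1e9. Writing the scaling as
 * (timestamp/100) * (mClock/1000) / 10000 keeps the intermediate products from
 * overflowing a long for realistic clock rates (e.g. 90000 Hz for video).
 */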
/** Sets the marker in the RTP packet. */
public void markNextPacket() {
mBuffers[mBufferIn][1] |= 0x80;
}
/** The Thread sends the packets in the FIFO one by one at a constant rate. */
@Override
public void run() {
Statistics stats = new Statistics(50,3000);
try {
// Caches mCacheSize milliseconds of the stream in the FIFO.
Thread.sleep(mCacheSize);
long delta = 0;
while (mBufferCommitted.tryAcquire(4,TimeUnit.SECONDS)) {
if (mOldTimestamp != 0) {
// We use our knowledge of the clock rate of the stream and the difference between two timestamps to
// compute the time lapse that the packet represents.
if ((mTimestamps[mBufferOut]-mOldTimestamp)>0) {
stats.push(mTimestamps[mBufferOut]-mOldTimestamp);
long d = stats.average()/1000000;
//Log.d(TAG,"delay: "+d+" d: "+(mTimestamps[mBufferOut]-mOldTimestamp)/1000000);
// We ensure that packets are sent at a constant and suitable rate no matter how the RtpSocket is used.
if (mCacheSize>0) Thread.sleep(d);
} else if ((mTimestamps[mBufferOut]-mOldTimestamp)<0) {
Log.e(TAG, "TS: "+mTimestamps[mBufferOut]+" OLD: "+mOldTimestamp);
}
delta += mTimestamps[mBufferOut]-mOldTimestamp;
if (delta>500000000 || delta<0) {
//Log.d(TAG,"permits: "+mBufferCommitted.availablePermits());
delta = 0;
}
}
mReport.update(mPackets[mBufferOut].getLength(), (mTimestamps[mBufferOut]/100L)*(mClock/1000L)/10000L);
mOldTimestamp = mTimestamps[mBufferOut];
if (mCount++>30) {
if (mTransport == TRANSPORT_UDP) {
mSocket.send(mPackets[mBufferOut]);
} else {
sendTCP();
}
}
if (++mBufferOut>=mBufferCount) mBufferOut = 0;
mBufferRequested.release();
}
} catch (Exception e) {
e.printStackTrace();
}
mThread = null;
resetFifo();
}
private void sendTCP() {
synchronized (mOutputStream) {
int len = mPackets[mBufferOut].getLength();
Log.d(TAG,"sent "+len);
mTcpHeader[2] = (byte) (len>>8);
mTcpHeader[3] = (byte) (len&0xFF);
try {
mOutputStream.write(mTcpHeader);
mOutputStream.write(mBuffers[mBufferOut], 0, len);
} catch (Exception e) {}
}
}
private void setLong(byte[] buffer, long n, int begin, int end) {
for (end--; end >= begin; end--) {
buffer[end] = (byte) (n % 256);
n >>= 8;
}
}
/**
* Computes an average bit rate.
**/
protected static class AverageBitrate {
private final static long RESOLUTION = 200;
private long mOldNow, mNow, mDelta;
private long[] mElapsed, mSum;
private int mCount, mIndex, mTotal;
private int mSize;
public AverageBitrate() {
mSize = 5000/((int)RESOLUTION);
reset();
}
public AverageBitrate(int delay) {
mSize = delay/((int)RESOLUTION);
reset();
}
public void reset() {
mSum = new long[mSize];
mElapsed = new long[mSize];
mNow = SystemClock.elapsedRealtime();
mOldNow = mNow;
mCount = 0;
mDelta = 0;
mTotal = 0;
mIndex = 0;
}
public void push(int length) {
mNow = SystemClock.elapsedRealtime();
if (mCount>0) {
mDelta += mNow - mOldNow;
mTotal += length;
if (mDelta>RESOLUTION) {
mSum[mIndex] = mTotal;
mTotal = 0;
mElapsed[mIndex] = mDelta;
mDelta = 0;
mIndex++;
if (mIndex>=mSize) mIndex = 0;
}
}
mOldNow = mNow;
mCount++;
}
public int average() {
long delta = 0, sum = 0;
for (int i=0;i<mSize;i++) {
sum += mSum[i];
delta += mElapsed[i];
}
//Log.d(TAG, "Time elapsed: "+delta);
return (int) (delta>0?8000*sum/delta:0);
}
}
/** Computes the proper rate at which packets are sent. */
protected static class Statistics {
public final static String TAG = "Statistics";
private int count=500, c = 0;
private float m = 0, q = 0;
private long elapsed = 0;
private long start = 0;
private long duration = 0;
private long period = 6000000000L;
private boolean initoffset = false;
public Statistics(int count, long period) {
this.count = count;
this.period = period*1000000L;
}
public void push(long value) {
duration += value;
elapsed += value;
if (elapsed>period) {
elapsed = 0;
long now = System.nanoTime();
if (!initoffset || (now - start < 0)) {
start = now;
duration = 0;
initoffset = true;
}
value -= (now - start) - duration;
//Log.d(TAG, "sum1: "+duration/1000000+" sum2: "+(now-start)/1000000+" drift: "+((now-start)-duration)/1000000+" v: "+value/1000000);
}
if (c<40) {
// We ignore the first 40 measured values because they may not be accurate
c++;
m = value;
} else {
m = (m*q+value)/(q+1);
if (q<count) q++;
}
}
public long average() {
long l = (long)m-2000000;
return l>0 ? l : 0;
}
}
}
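The fixed 12-byte RTP header that RtpSocket pre-fills in its constructor and patches per packet can be illustrated with this standalone sketch (RFC 3550; all field values are arbitrary examples):
// Standalone sketch of the 12-byte RTP header layout used by RtpSocket.
public class RtpHeaderSketch {
static void setLong(byte[] buffer, long n, int begin, int end) {
for (end--; end >= begin; end--) {
buffer[end] = (byte) (n % 256);
n >>= 8;
}
}
public static void main(String[] args) {
byte[] header = new byte[12];
header[0] = (byte) 0x80; // version 2, no padding, no extension, no CSRC
header[1] = (byte) 96; // dynamic payload type 96
setLong(header, 1234, 2, 4); // bytes 2-3: sequence number
setLong(header, 90000, 4, 8); // bytes 4-7: timestamp in clock ticks
setLong(header, 0xCAFE, 8, 12); // bytes 8-11: SSRC
for (byte b : header) System.out.printf("%02x ", b);
}
}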
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtsp;
import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
class RtcpDeinterleaver extends InputStream implements Runnable {
public final static String TAG = "RtcpDeinterleaver";
private IOException mIOException;
private InputStream mInputStream;
private PipedInputStream mPipedInputStream;
private PipedOutputStream mPipedOutputStream;
private byte[] mBuffer;
public RtcpDeinterleaver(InputStream inputStream) {
mInputStream = inputStream;
mPipedInputStream = new PipedInputStream(4096);
try {
mPipedOutputStream = new PipedOutputStream(mPipedInputStream);
} catch (IOException e) {}
mBuffer = new byte[1024];
new Thread(this).start();
}
@Override
public void run() {
try {
while (true) {
int len = mInputStream.read(mBuffer, 0, 1024);
if (len < 0) throw new IOException("End of stream");
mPipedOutputStream.write(mBuffer, 0, len);
}
} catch (IOException e) {
try {
mPipedInputStream.close();
} catch (IOException ignore) {}
mIOException = e;
}
}
@Override
public int read(byte[] buffer) throws IOException {
if (mIOException != null) {
throw mIOException;
}
return mPipedInputStream.read(buffer);
}
@Override
public int read(byte[] buffer, int offset, int length) throws IOException {
if (mIOException != null) {
throw mIOException;
}
return mPipedInputStream.read(buffer, offset, length);
}
@Override
public int read() throws IOException {
if (mIOException != null) {
throw mIOException;
}
return mPipedInputStream.read();
}
@Override
public void close() throws IOException {
mInputStream.close();
}
}
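For context, when TCP transport is used, RtpSocket.sendTCP() frames each packet per RFC 2326 section 10.12 as '$' | channel | 16-bit length | payload. A standalone sketch (not part of the project) of parsing one such interleaved frame:
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
// Sketch only: reads one interleaved frame from the RTSP TCP connection.
public class InterleavedFrameSketch {
public static byte[] readFrame(InputStream in) throws IOException {
DataInputStream din = new DataInputStream(in);
int magic = din.readUnsignedByte();
if (magic != '$') throw new IOException("Not an interleaved frame");
int channel = din.readUnsignedByte(); // even = RTP, odd = RTCP by convention
int length = din.readUnsignedShort(); // big-endian payload length
byte[] payload = new byte[length];
din.readFully(payload);
return payload;
}
}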
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtsp;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.Socket;
import java.net.SocketException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Locale;
import java.util.concurrent.Semaphore;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.majorkernelpanic.streaming.Session;
import net.majorkernelpanic.streaming.Stream;
import net.majorkernelpanic.streaming.rtp.RtpSocket;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.util.Log;
/**
* RFC 2326.
* A basic and asynchronous RTSP client.
* The original purpose of this class was to implement a small RTSP client compatible with Wowza.
* It implements Digest Access Authentication according to RFC 2069.
*/
public class RtspClient {
public final static String TAG = "RtspClient";
/** Message sent when the connection to the RTSP server failed. */
public final static int ERROR_CONNECTION_FAILED = 0x01;
/** Message sent when the credentials are wrong. */
public final static int ERROR_WRONG_CREDENTIALS = 0x03;
/** Use this to use UDP for the transport protocol. */
public final static int TRANSPORT_UDP = RtpSocket.TRANSPORT_UDP;
/** Use this to use TCP for the transport protocol. */
public final static int TRANSPORT_TCP = RtpSocket.TRANSPORT_TCP;
/**
* Message sent when the connection with the RTSP server has been lost for
* some reason (for example, the user is going under a bridge).
* When the connection with the server is lost, the client will automatically try to
* reconnect as long as {@link #stopStream()} is not called.
**/
public final static int ERROR_CONNECTION_LOST = 0x04;
/**
* Message sent when the connection with the RTSP server has been reestablished.
* When the connection with the server is lost, the client will automatically try to
* reconnect as long as {@link #stopStream()} is not called.
*/
public final static int MESSAGE_CONNECTION_RECOVERED = 0x05;
private final static int STATE_STARTED = 0x00;
private final static int STATE_STARTING = 0x01;
private final static int STATE_STOPPING = 0x02;
private final static int STATE_STOPPED = 0x03;
private int mState = 0;
private class Parameters {
public String host;
public String username;
public String password;
public String path;
public Session session;
public int port;
public int transport;
public Parameters clone() {
Parameters params = new Parameters();
params.host = host;
params.username = username;
params.password = password;
params.path = path;
params.session = session;
params.port = port;
params.transport = transport;
return params;
}
}
private Parameters mTmpParameters;
private Parameters mParameters;
private int mCSeq;
private Socket mSocket;
private String mSessionID;
private String mAuthorization;
private BufferedReader mBufferedReader;
private OutputStream mOutputStream;
private Callback mCallback;
private Handler mMainHandler;
private Handler mHandler;
/**
* The callback interface you need to implement to know what's going on with the
* RTSP server (for example your Wowza Media Server).
*/
public interface Callback {
public void onRtspUpdate(int message, Exception exception);
}
public RtspClient() {
mCSeq = 0;
mTmpParameters = new Parameters();
mTmpParameters.port = 1935;
mTmpParameters.path = "/";
mTmpParameters.transport = TRANSPORT_UDP;
mAuthorization = null;
mCallback = null;
mMainHandler = new Handler(Looper.getMainLooper());
mState = STATE_STOPPED;
final Semaphore signal = new Semaphore(0);
new HandlerThread("net.majorkernelpanic.streaming.RtspClient"){
@Override
protected void onLooperPrepared() {
mHandler = new Handler();
signal.release();
}
}.start();
signal.acquireUninterruptibly();
}
/**
* Sets the callback interface that will be called on status updates of the connection
* with the RTSP server.
* @param cb The implementation of the {@link Callback} interface
*/
public void setCallback(Callback cb) {
mCallback = cb;
}
/**
* The {@link Session} that will be used to stream to the server.
* If not set before {@link #startStream()}, one will be created.
*/
public void setSession(Session session) {
mTmpParameters.session = session;
}
public Session getSession() {
return mTmpParameters.session;
}
/**
* Sets the destination address of the RTSP server.
* @param host The destination address
* @param port The destination port
*/
public void setServerAddress(String host, int port) {
mTmpParameters.port = port;
mTmpParameters.host = host;
}
/**
* If authentication is enabled on the server, you need to call this with a valid login/password pair.
* Only implements Digest Access Authentication according to RFC 2069.
* @param username The login
* @param password The password
*/
public void setCredentials(String username, String password) {
mTmpParameters.username = username;
mTmpParameters.password = password;
}
/**
* Sets the path to which the stream will be sent.
* @param path The path
*/
public void setStreamPath(String path) {
mTmpParameters.path = path;
}
/**
* Call this with {@link #TRANSPORT_TCP} or {@link #TRANSPORT_UDP} to choose the
* transport protocol that will be used to send RTP/RTCP packets.
* Not ready yet !
*/
public void setTransportMode(int mode) {
mTmpParameters.transport = mode;
}
public boolean isStreaming() {
return mState==STATE_STARTED||mState==STATE_STARTING;
}
/**
* Connects to the RTSP server to publish the stream, and then effectively starts streaming.
* You need to call {@link #setServerAddress(String, int)} and optionally {@link #setSession(Session)}
* and {@link #setCredentials(String, String)} before calling this.
* The connection work is posted to an internal handler thread, so this can safely be called from the main thread.
*/
public void startStream() {
if (mTmpParameters.host == null) throw new IllegalStateException("setServerAddress(String,int) has not been called !");
if (mTmpParameters.session == null) throw new IllegalStateException("setSession() has not been called !");
mHandler.post(new Runnable () {
@Override
public void run() {
if (mState != STATE_STOPPED) return;
mState = STATE_STARTING;
Log.d(TAG,"Connecting to RTSP server...");
// If the user calls some methods to configure the client, it won't modify its behavior until the stream is restarted
mParameters = mTmpParameters.clone();
mParameters.session.setDestination(mTmpParameters.host);
try {
mParameters.session.syncConfigure();
} catch (Exception e) {
mParameters.session = null;
mState = STATE_STOPPED;
return;
}
try {
tryConnection();
} catch (Exception e) {
postError(ERROR_CONNECTION_FAILED, e);
abort();
return;
}
try {
mParameters.session.syncStart();
mState = STATE_STARTED;
if (mParameters.transport == TRANSPORT_UDP) {
mHandler.post(mConnectionMonitor);
}
} catch (Exception e) {
abort();
}
}
});
}
/**
* Stops the stream, and informs the RTSP server.
*/
public void stopStream() {
mHandler.post(new Runnable () {
@Override
public void run() {
if (mParameters != null && mParameters.session != null) {
mParameters.session.stop();
}
if (mState != STATE_STOPPED) {
mState = STATE_STOPPING;
abort();
}
}
});
}
public void release() {
stopStream();
mHandler.getLooper().quit();
}
private void abort() {
try {
sendRequestTeardown();
} catch (Exception ignore) {}
try {
mSocket.close();
} catch (Exception ignore) {}
mHandler.removeCallbacks(mConnectionMonitor);
mHandler.removeCallbacks(mRetryConnection);
mState = STATE_STOPPED;
}
private void tryConnection() throws IOException {
mCSeq = 0;
mSocket = new Socket(mParameters.host, mParameters.port);
mBufferedReader = new BufferedReader(new InputStreamReader(mSocket.getInputStream()));
mOutputStream = new BufferedOutputStream(mSocket.getOutputStream());
sendRequestAnnounce();
sendRequestSetup();
Log.i("tryConnection : ", "rec");
sendRequestRecord();
}
/**
* Forges and sends the ANNOUNCE request
*/
private void sendRequestAnnounce() throws IllegalStateException, SocketException, IOException {
String body = mParameters.session.getSessionDescription();
String request = "ANNOUNCE rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+" RTSP/1.0\r\n" +
"CSeq: " + (++mCSeq) + "\r\n" +
"Content-Length: " + body.length() + "\r\n" +
"Content-Type: application/sdp\r\n\r\n" +
body;
mOutputStream.write(request.getBytes("UTF-8"));
mOutputStream.flush();
}
/**
* Forges and sends the SETUP request
*/
private void sendRequestSetup() throws IllegalStateException, SocketException, IOException {
// Retrieve the audio and video tracks of the session
for (int i=0;i<2;i++) {
Stream stream = mParameters.session.getTrack(i);
if (stream != null) {
String params = mParameters.transport==TRANSPORT_TCP ?
("TCP;interleaved="+2*i+"-"+(2*i+1)) : ("UDP;unicast;client_port="+(5000+2*i)+"-"+(5000+2*i+1)+";mode=receive");
String request = "SETUP rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+"/trackID="+i+" RTSP/1.0\r\n" +
"Transport: RTP/AVP/"+params+"\r\n" +
addHeaders();
Log.i(TAG,request.substring(0, request.indexOf("\r\n")));
mOutputStream.write(request.getBytes("UTF-8"));
mOutputStream.flush();
}
}
}
/**
* Forges and sends the RECORD request
*/
private void sendRequestRecord() throws IllegalStateException, SocketException, IOException {
String request = "RECORD rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+" RTSP/1.0\r\n" +
"Range: npt=0.000-\r\n" +
addHeaders();
Log.i(TAG,request.substring(0, request.indexOf("\r\n")));
mOutputStream.write(request.getBytes("UTF-8"));
mOutputStream.flush();
}
/**
* Forges and sends the TEARDOWN request
*/
private void sendRequestTeardown() throws IOException {
String request = "TEARDOWN rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+" RTSP/1.0\r\n" + addHeaders();
Log.i(TAG,request.substring(0, request.indexOf("\r\n")));
mOutputStream.write(request.getBytes("UTF-8"));
mOutputStream.flush();
}
/**
* Forges and sends the OPTIONS request
*/
private void sendRequestOption() throws IOException {
String request = "OPTIONS rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+" RTSP/1.0\r\n" + addHeaders();
Log.i(TAG,request.substring(0, request.indexOf("\r\n")));
mOutputStream.write(request.getBytes("UTF-8"));
mOutputStream.flush();
Response.parseResponse(mBufferedReader);
}
private String addHeaders() {
return "CSeq: " + (++mCSeq) + "\r\n" +
"Content-Length: 0\r\n" +
"Session: " + mSessionID + "\r\n" +
// For some reason you may have to remove last "\r\n" in the next line to make the RTSP client work with your wowza server :/
(mAuthorization != null ? "Authorization: " + mAuthorization + "\r\n":"") + "\r\n";
}
/**
* If the connection with the RTSP server is lost, we try to reconnect to it as
* long as {@link #stopStream()} is not called.
*/
private Runnable mConnectionMonitor = new Runnable() {
@Override
public void run() {
if (mState == STATE_STARTED) {
try {
// We poll the RTSP server with OPTION requests
sendRequestOption();
mHandler.postDelayed(mConnectionMonitor, 6000);
} catch (IOException e) {
// Happens if the OPTION request fails
postMessage(ERROR_CONNECTION_LOST);
Log.e(TAG, "Connection lost with the server...");
mParameters.session.stop();
mHandler.post(mRetryConnection);
}
}
}
};
/** Here, we try to reconnect to the RTSP server. */
private Runnable mRetryConnection = new Runnable() {
@Override
public void run() {
if (mState == STATE_STARTED) {
try {
Log.e(TAG, "Trying to reconnect...");
tryConnection();
try {
mParameters.session.start();
mHandler.post(mConnectionMonitor);
postMessage(MESSAGE_CONNECTION_RECOVERED);
} catch (Exception e) {
abort();
}
} catch (IOException e) {
mHandler.postDelayed(mRetryConnection,1000);
}
}
}
};
final protected static char[] hexArray = {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
private static String bytesToHex(byte[] bytes) {
char[] hexChars = new char[bytes.length * 2];
int v;
for ( int j = 0; j < bytes.length; j++ ) {
v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
/** Needed for the Digest Access Authentication. */
private String computeMd5Hash(String buffer) {
MessageDigest md;
try {
md = MessageDigest.getInstance("MD5");
return bytesToHex(md.digest(buffer.getBytes("UTF-8")));
} catch (NoSuchAlgorithmException ignore) {
} catch (UnsupportedEncodingException e) {}
return "";
}
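/*
 * For reference, RFC 2069 computes the digest sent in the Authorization header as:
 *   HA1 = MD5(username ":" realm ":" password)
 *   HA2 = MD5(method ":" uri)
 *   response = MD5(HA1 ":" nonce ":" HA2)
 * computeMd5Hash() above provides the MD5 primitive used for each of these steps.
 */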
private void postMessage(final int message) {
mMainHandler.post(new Runnable() {
@Override
public void run() {
if (mCallback != null) {
mCallback.onRtspUpdate(message, null);
}
}
});
}
private void postError(final int message, final Exception e) {
mMainHandler.post(new Runnable() {
@Override
public void run() {
if (mCallback != null) {
mCallback.onRtspUpdate(message, e);
}
}
});
}
static class Response {
// Parses the status line of an RTSP response
public static final Pattern regexStatus = Pattern.compile("RTSP/\\d.\\d (\\d+) (\\w+)",Pattern.CASE_INSENSITIVE);
// Parses a request header
public static final Pattern rexegHeader = Pattern.compile("(\\S+):(.+)",Pattern.CASE_INSENSITIVE);
// Parses a WWW-Authenticate header
public static final Pattern rexegAuthenticate = Pattern.compile("realm=\"(.+)\",\\s+nonce=\"(\\w+)\"",Pattern.CASE_INSENSITIVE);
// Parses a Session header
public static final Pattern rexegSession = Pattern.compile("(\\d+)",Pattern.CASE_INSENSITIVE);
// Parses a Transport header
public static final Pattern rexegTransport = Pattern.compile("client_port=(\\d+)-(\\d+).+server_port=(\\d+)-(\\d+)",Pattern.CASE_INSENSITIVE);
public int status;
public HashMap<String,String> headers = new HashMap<>();
/** Parse the status line & headers of an RTSP response */
public static Response parseResponse(BufferedReader input) throws IOException, IllegalStateException, SocketException {
Response response = new Response();
String line;
Matcher matcher;
// Parsing the status line of the response
if ((line = input.readLine())==null) throw new SocketException("Connection lost");
Log.i("parseResponse", regexStatus.matcher(line).toString());
matcher = regexStatus.matcher(line);
matcher.find();
response.status = Integer.parseInt(matcher.group(1));
// Parsing headers of the response
while ( (line = input.readLine()) != null) {
//Log.e(TAG,"l: "+line.length()+", c: "+line);
if (line.length()>3) {
matcher = rexegHeader.matcher(line);
matcher.find();
response.headers.put(matcher.group(1).toLowerCase(Locale.US),matcher.group(2));
} else {
break;
}
}
if (line==null) throw new SocketException("Connection lost");
Log.d(TAG, "Response from server: "+response.status);
return response;
}
}
}
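A hedged usage sketch of RtspClient (host, port, path and credentials are placeholders, and the SessionBuilder call is assumed from the rest of the library):
// Sketch only: publish a stream to an RTSP server such as Wowza.
RtspClient client = new RtspClient();
client.setServerAddress("192.168.0.10", 1935);
client.setStreamPath("/live/myStream");
client.setCredentials("user", "pass"); // only if the server requires authentication
client.setSession(SessionBuilder.getInstance().build()); // assumed builder from libstreaming
client.setCallback(new RtspClient.Callback() {
@Override
public void onRtspUpdate(int message, Exception exception) {
Log.d("Sketch", "RTSP update: " + message, exception);
}
});
client.startStream(); // asynchronous; work is posted to an internal handler thread
// ... later:
client.stopStream();
client.release();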
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtsp;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.BindException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Locale;
import java.util.WeakHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.majorkernelpanic.streaming.Session;
import net.majorkernelpanic.streaming.SessionBuilder;
import android.app.Service;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.os.Binder;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.util.Base64;
import android.util.Log;
/**
* Implementation of a subset of the RTSP protocol (RFC 2326).
*
* It allows remote control of an Android device's cameras & microphone.
* For each connected client, a Session is instantiated.
* The Session will start or stop streams according to what the client wants.
*
*/
public class RtspServer extends Service {
public final static String TAG = "RtspServer";
/** The server name that will appear in responses. */
public static String SERVER_NAME = "MajorKernelPanic RTSP Server";
/** Port used by default. */
public static final int DEFAULT_RTSP_PORT = 8086;
/** Port already in use. */
public final static int ERROR_BIND_FAILED = 0x00;
/** A stream could not be started. */
public final static int ERROR_START_FAILED = 0x01;
/** Streaming started. */
public final static int MESSAGE_STREAMING_STARTED = 0X00;
/** Streaming stopped. */
public final static int MESSAGE_STREAMING_STOPPED = 0X01;
/** Key used in the SharedPreferences to store whether the RTSP server is enabled or not. */
public final static String KEY_ENABLED = "rtsp_enabled";
/** Key used in the SharedPreferences for the port used by the RTSP server. */
public final static String KEY_PORT = "rtsp_port";
protected SessionBuilder mSessionBuilder;
protected SharedPreferences mSharedPreferences;
protected boolean mEnabled = true;
protected int mPort = DEFAULT_RTSP_PORT;
protected WeakHashMap<Session,Object> mSessions = new WeakHashMap<>(2);
private RequestListener mListenerThread;
private final IBinder mBinder = new LocalBinder();
private boolean mRestart = false;
private final LinkedList<CallbackListener> mListeners = new LinkedList<>();
/** Credentials for Basic Auth */
private String mUsername;
private String mPassword;
public RtspServer() {
}
/** Be careful: those callbacks won't necessarily be called from the ui thread ! */
public interface CallbackListener {
/** Called when an error occurs. */
void onError(RtspServer server, Exception e, int error);
/** Called when streaming starts/stops. */
void onMessage(RtspServer server, int message);
}
/**
* See {@link RtspServer.CallbackListener} to check out what events will be fired once you set up a listener.
* @param listener The listener
*/
public void addCallbackListener(CallbackListener listener) {
synchronized (mListeners) {
if (!mListeners.isEmpty()) {
for (CallbackListener cl : mListeners) {
if (cl == listener) return;
}
}
mListeners.add(listener);
}
}
/**
* Removes the listener.
* @param listener The listener
*/
public void removeCallbackListener(CallbackListener listener) {
synchronized (mListeners) {
mListeners.remove(listener);
}
}
/** Returns the port used by the RTSP server. */
public int getPort() {
return mPort;
}
/**
* Sets the port for the RTSP server to use.
* @param port The port
*/
public void setPort(int port) {
Editor editor = mSharedPreferences.edit();
editor.putString(KEY_PORT, String.valueOf(port));
editor.commit();
}
/**
* Sets the Basic authentication credentials required to access the RTSP stream.
* @param username username
* @param password password
*/
public void setAuthorization(String username, String password)
{
mUsername = username;
mPassword = password;
}
/**
* Starts (or restarts if needed, for example when the configuration
* of the server has been modified) the RTSP server.
*/
public void start() {
if (!mEnabled || mRestart) stop();
if (mEnabled && mListenerThread == null) {
try {
mListenerThread = new RequestListener();
} catch (Exception e) {
mListenerThread = null;
}
}
mRestart = false;
}
/**
* Stops the RTSP server but not the Android Service.
* To stop the Android Service you need to call {@link android.content.Context#stopService(Intent)}.
*/
public void stop() {
if (mListenerThread != null) {
try {
mListenerThread.kill();
for ( Session session : mSessions.keySet() ) {
if ( session != null && session.isStreaming() ) {
session.stop();
}
}
} catch (Exception e) {
} finally {
mListenerThread = null;
}
}
}
/** Returns whether or not the RTSP server is streaming to some client(s). */
public boolean isStreaming() {
for ( Session session : mSessions.keySet() ) {
if ( session != null && session.isStreaming() ) {
return true;
}
}
return false;
}
public boolean isEnabled() {
return mEnabled;
}
/** Returns the bandwidth consumed by the RTSP server in bits per second. */
public long getBitrate() {
long bitrate = 0;
for ( Session session : mSessions.keySet() ) {
if ( session != null && session.isStreaming() ) {
bitrate += session.getBitrate();
}
}
return bitrate;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
return START_STICKY;
}
@Override
public void onCreate() {
// Let's restore the state of the service
mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
mPort = Integer.parseInt(mSharedPreferences.getString(KEY_PORT, String.valueOf(mPort)));
mEnabled = mSharedPreferences.getBoolean(KEY_ENABLED, mEnabled);
// If the configuration is modified, the server will adjust
mSharedPreferences.registerOnSharedPreferenceChangeListener(mOnSharedPreferenceChangeListener);
start();
}
@Override
public void onDestroy() {
stop();
mSharedPreferences.unregisterOnSharedPreferenceChangeListener(mOnSharedPreferenceChangeListener);
}
private OnSharedPreferenceChangeListener mOnSharedPreferenceChangeListener = new OnSharedPreferenceChangeListener() {
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
if (key.equals(KEY_PORT)) {
int port = Integer.parseInt(sharedPreferences.getString(KEY_PORT, String.valueOf(mPort)));
if (port != mPort) {
mPort = port;
mRestart = true;
start();
}
}
else if (key.equals(KEY_ENABLED)) {
mEnabled = sharedPreferences.getBoolean(KEY_ENABLED, mEnabled);
start();
}
}
};
/** The Binder you obtain when a connection with the Service is established. */
public class LocalBinder extends Binder {
public RtspServer getService() {
return RtspServer.this;
}
}
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
protected void postMessage(int id) {
synchronized (mListeners) {
if (!mListeners.isEmpty()) {
for (CallbackListener cl : mListeners) {
cl.onMessage(this, id);
}
}
}
}
protected void postError(Exception exception, int id) {
synchronized (mListeners) {
if (!mListeners.isEmpty()) {
for (CallbackListener cl : mListeners) {
cl.onError(this, exception, id);
}
}
}
}
/**
* By default the RTSP server uses {@link UriParser} to parse the URI requested by the client,
* but you can change that behavior by overriding this method.
* @param uri The uri that the client has requested
* @param client The socket associated to the client
* @return A proper session
*/
protected Session handleRequest(String uri, Socket client) throws IllegalStateException, IOException {
Session session = UriParser.parse(uri);
session.setOrigin(client.getLocalAddress().getHostAddress());
if (session.getDestination()==null) {
session.setDestination(client.getInetAddress().getHostAddress());
}
return session;
}
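/*
 * A subclass could, for instance, serve a fixed Session instead of parsing the
 * URI (sketch only, bypassing UriParser entirely):
 *
 *   protected Session handleRequest(String uri, Socket client) {
 *       Session session = SessionBuilder.getInstance().build();
 *       session.setOrigin(client.getLocalAddress().getHostAddress());
 *       session.setDestination(client.getInetAddress().getHostAddress());
 *       return session;
 *   }
 */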
class RequestListener extends Thread {
private final ServerSocket mServer;
public RequestListener() throws IOException {
try {
mServer = new ServerSocket(mPort);
start();
} catch (BindException e) {
Log.e(TAG,"Port already in use !");
postError(e, ERROR_BIND_FAILED);
throw e;
}
}
public void run() {
Log.i(TAG,"RTSP server listening on port "+mServer.getLocalPort());
while (!Thread.interrupted()) {
try {
new WorkerThread(mServer.accept()).start();
} catch (SocketException e) {
break;
} catch (IOException e) {
Log.e(TAG,e.getMessage());
continue;
}
}
Log.i(TAG,"RTSP server stopped !");
}
public void kill() {
try {
mServer.close();
} catch (IOException e) {}
try {
this.join();
} catch (InterruptedException ignore) {}
}
}
// One thread per client
class WorkerThread extends Thread {
private final Socket mClient;
private final OutputStream mOutput;
private final BufferedReader mInput;
// Each client has an associated session
private Session mSession;
public WorkerThread(final Socket client) throws IOException {
mInput = new BufferedReader(new InputStreamReader(client.getInputStream()));
mOutput = client.getOutputStream();
mClient = client;
mSession = new Session();
}
public void run() {
Request request;
Response response;
Log.i(TAG, "Connection from "+mClient.getInetAddress().getHostAddress());
while (!Thread.interrupted()) {
request = null;
response = null;
// Parse the request
try {
request = Request.parseRequest(mInput);
} catch (SocketException e) {
// Client has left
break;
} catch (Exception e) {
// We don't understand the request :/
response = new Response();
response.status = Response.STATUS_BAD_REQUEST;
}
// Do something accordingly like starting the streams, sending a session description
if (request != null) {
try {
response = processRequest(request);
}
catch (Exception e) {
// This alerts the main thread that something has gone wrong in this thread
postError(e, ERROR_START_FAILED);
Log.e(TAG,e.getMessage()!=null?e.getMessage():"An error occurred");
e.printStackTrace();
response = new Response(request);
}
}
// We always send a response
// The client will receive an "INTERNAL SERVER ERROR" if an exception has been thrown at some point
try {
response.send(mOutput);
} catch (IOException e) {
Log.e(TAG,"Response was not sent properly");
break;
}
}
// Streaming stops when client disconnects
boolean streaming = isStreaming();
mSession.syncStop();
if (streaming && !isStreaming()) {
postMessage(MESSAGE_STREAMING_STOPPED);
}
mSession.release();
try {
mClient.close();
} catch (IOException ignore) {}
Log.i(TAG, "Client disconnected");
}
public Response processRequest(Request request) throws IllegalStateException, IOException {
Response response = new Response(request);
//Ask for authorization unless this is an OPTIONS request
if(!isAuthorized(request) && !request.method.equalsIgnoreCase("OPTIONS"))
{
response.attributes = "WWW-Authenticate: Basic realm=\""+SERVER_NAME+"\"\r\n";
response.status = Response.STATUS_UNAUTHORIZED;
}
else
{
/* ********************************************************************************** */
/* ********************************* Method DESCRIBE ******************************** */
/* ********************************************************************************** */
if (request.method.equalsIgnoreCase("DESCRIBE")) {
// Parse the requested URI and configure the session
mSession = handleRequest(request.uri, mClient);
mSessions.put(mSession, null);
mSession.syncConfigure();
String requestContent = mSession.getSessionDescription();
String requestAttributes =
"Content-Base: " + mClient.getLocalAddress().getHostAddress() + ":" + mClient.getLocalPort() + "/\r\n" +
"Content-Type: application/sdp\r\n";
response.attributes = requestAttributes;
response.content = requestContent;
// If no exception has been thrown, we reply with OK
response.status = Response.STATUS_OK;
}
/* ********************************************************************************** */
/* ********************************* Method OPTIONS ********************************* */
/* ********************************************************************************** */
else if (request.method.equalsIgnoreCase("OPTIONS")) {
response.attributes = "Public: DESCRIBE,SETUP,TEARDOWN,PLAY,PAUSE\r\n";
response.status = Response.STATUS_OK;
}
/* ********************************************************************************** */
/* ********************************** Method SETUP ********************************** */
/* ********************************************************************************** */
else if (request.method.equalsIgnoreCase("SETUP")) {
Pattern p;
Matcher m;
int p2, p1, ssrc, trackId, src[];
String destination;
p = Pattern.compile("trackID=(\\w+)", Pattern.CASE_INSENSITIVE);
m = p.matcher(request.uri);
if (!m.find()) {
response.status = Response.STATUS_BAD_REQUEST;
return response;
}
trackId = Integer.parseInt(m.group(1));
if (!mSession.trackExists(trackId)) {
response.status = Response.STATUS_NOT_FOUND;
return response;
}
p = Pattern.compile("client_port=(\\d+)(?:-(\\d+))?", Pattern.CASE_INSENSITIVE);
m = p.matcher(request.headers.get("transport"));
if (!m.find()) {
int[] ports = mSession.getTrack(trackId).getDestinationPorts();
p1 = ports[0];
p2 = ports[1];
} else {
p1 = Integer.parseInt(m.group(1));
if (m.group(2) == null) {
p2 = p1+1;
} else {
p2 = Integer.parseInt(m.group(2));
}
}
ssrc = mSession.getTrack(trackId).getSSRC();
src = mSession.getTrack(trackId).getLocalPorts();
destination = mSession.getDestination();
mSession.getTrack(trackId).setDestinationPorts(p1, p2);
boolean streaming = isStreaming();
mSession.syncStart(trackId);
if (!streaming && isStreaming()) {
postMessage(MESSAGE_STREAMING_STARTED);
}
response.attributes = "Transport: RTP/AVP/UDP;" + (InetAddress.getByName(destination).isMulticastAddress() ? "multicast" : "unicast") +
";destination=" + mSession.getDestination() +
";client_port=" + p1 + "-" + p2 +
";server_port=" + src[0] + "-" + src[1] +
";ssrc=" + Integer.toHexString(ssrc) +
";mode=play\r\n" +
"Session: " + "1185d20035702ca" + "\r\n" +
"Cache-Control: no-cache\r\n";
// If no exception has been thrown, we reply with OK
response.status = Response.STATUS_OK;
}
/* ********************************************************************************** */
/* ********************************** Method PLAY *********************************** */
/* ********************************************************************************** */
else if (request.method.equalsIgnoreCase("PLAY")) {
String requestAttributes = "RTP-Info: ";
if (mSession.trackExists(0))
requestAttributes += "url=rtsp://" + mClient.getLocalAddress().getHostAddress() + ":" + mClient.getLocalPort() + "/trackID=" + 0 + ";seq=0,";
if (mSession.trackExists(1))
requestAttributes += "url=rtsp://" + mClient.getLocalAddress().getHostAddress() + ":" + mClient.getLocalPort() + "/trackID=" + 1 + ";seq=0,";
requestAttributes = requestAttributes.substring(0, requestAttributes.length() - 1) + "\r\nSession: 1185d20035702ca\r\n";
response.attributes = requestAttributes;
// If no exception has been thrown, we reply with OK
response.status = Response.STATUS_OK;
}
/* ********************************************************************************** */
/* ********************************** Method PAUSE ********************************** */
/* ********************************************************************************** */
else if (request.method.equalsIgnoreCase("PAUSE")) {
response.status = Response.STATUS_OK;
}
/* ********************************************************************************** */
/* ********************************* Method TEARDOWN ******************************** */
/* ********************************************************************************** */
else if (request.method.equalsIgnoreCase("TEARDOWN")) {
response.status = Response.STATUS_OK;
}
/* ********************************************************************************** */
/* ********************************* Unknown method ? ******************************* */
/* ********************************************************************************** */
else {
Log.e(TAG, "Command unknown: " + request);
response.status = Response.STATUS_BAD_REQUEST;
}
}
return response;
}
/**
* Checks whether the request is authorized.
* @param request The request to check
* @return true if the request is authorized, false otherwise
*/
private boolean isAuthorized(Request request)
{
String auth = request.headers.get("authorization");
if(mUsername == null || mPassword == null || mUsername.isEmpty())
return true;
if(auth != null && !auth.isEmpty())
{
String received = auth.substring(auth.lastIndexOf(" ")+1);
String local = mUsername+":"+mPassword;
String localEncoded = Base64.encodeToString(local.getBytes(),Base64.NO_WRAP);
if(localEncoded.equals(received))
return true;
}
return false;
}
}
static class Request {
// Parse method & uri
public static final Pattern regexMethod = Pattern.compile("(\\w+) (\\S+) RTSP",Pattern.CASE_INSENSITIVE);
// Parse a request header
public static final Pattern rexegHeader = Pattern.compile("(\\S+):(.+)",Pattern.CASE_INSENSITIVE);
public String method;
public String uri;
public HashMap<String,String> headers = new HashMap<>();
/** Parse the method, uri & headers of a RTSP request */
public static Request parseRequest(BufferedReader input) throws IOException, IllegalStateException, SocketException {
Request request = new Request();
String line;
Matcher matcher;
// Parsing request method & uri
if ((line = input.readLine())==null) throw new SocketException("Client disconnected");
matcher = regexMethod.matcher(line);
matcher.find();
request.method = matcher.group(1);
request.uri = matcher.group(2);
// Parsing headers of the request
while ( (line = input.readLine()) != null && line.length()>3 ) {
matcher = rexegHeader.matcher(line);
matcher.find();
request.headers.put(matcher.group(1).toLowerCase(Locale.US),matcher.group(2));
}
if (line==null) throw new SocketException("Client disconnected");
// It's not an error, it's just easier to follow what's happening in logcat with the request in red
Log.e(TAG,request.method+" "+request.uri);
return request;
}
}
static class Response {
// Status code definitions
public static final String STATUS_OK = "200 OK";
public static final String STATUS_BAD_REQUEST = "400 Bad Request";
public static final String STATUS_UNAUTHORIZED = "401 Unauthorized";
public static final String STATUS_NOT_FOUND = "404 Not Found";
public static final String STATUS_INTERNAL_SERVER_ERROR = "500 Internal Server Error";
public String status = STATUS_INTERNAL_SERVER_ERROR;
public String content = "";
public String attributes = "";
private final Request mRequest;
public Response(Request request) {
this.mRequest = request;
}
public Response() {
// Be careful if you modify the send() method because mRequest might be null!
mRequest = null;
}
public void send(OutputStream output) throws IOException {
int seqid = -1;
try {
seqid = Integer.parseInt(mRequest.headers.get("cseq").replace(" ",""));
} catch (Exception e) {
Log.e(TAG,"Error parsing CSeq: "+(e.getMessage()!=null?e.getMessage():""));
}
String response = "RTSP/1.0 "+status+"\r\n" +
"Server: "+SERVER_NAME+"\r\n" +
(seqid>=0?("Cseq: " + seqid + "\r\n"):"") +
"Content-Length: " + content.length() + "\r\n" +
attributes +
"\r\n" +
content;
Log.d(TAG,response.replace("\r", ""));
output.write(response.getBytes());
}
}
}
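A hedged sketch of starting and binding the server from application code (the ServiceConnection boilerplate is standard Android; names are placeholders):
// Sketch only: start the RTSP server Service and obtain its binder.
context.startService(new Intent(context, RtspServer.class));
ServiceConnection connection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName name, IBinder binder) {
RtspServer server = ((RtspServer.LocalBinder) binder).getService();
Log.d("Sketch", "RTSP server bound, port " + server.getPort());
}
@Override
public void onServiceDisconnected(ComponentName name) {}
};
context.bindService(new Intent(context, RtspServer.class), connection, Context.BIND_AUTO_CREATE);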
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.rtsp;
import static net.majorkernelpanic.streaming.SessionBuilder.AUDIO_AAC;
import static net.majorkernelpanic.streaming.SessionBuilder.AUDIO_AMRNB;
import static net.majorkernelpanic.streaming.SessionBuilder.AUDIO_NONE;
import static net.majorkernelpanic.streaming.SessionBuilder.VIDEO_H263;
import static net.majorkernelpanic.streaming.SessionBuilder.VIDEO_H264;
import static net.majorkernelpanic.streaming.SessionBuilder.VIDEO_NONE;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URI;
import java.net.URLEncoder;
import java.net.UnknownHostException;
import java.util.Set;
import net.majorkernelpanic.streaming.MediaStream;
import net.majorkernelpanic.streaming.Session;
import net.majorkernelpanic.streaming.SessionBuilder;
import net.majorkernelpanic.streaming.audio.AudioQuality;
import net.majorkernelpanic.streaming.video.VideoQuality;
import android.content.ContentValues;
import android.hardware.Camera.CameraInfo;
/**
* This class parses URIs received by the RTSP server and configures a Session accordingly.
*/
public class UriParser {
public final static String TAG = "UriParser";
/**
* Configures a Session according to the given URI.
* Here are some examples of URIs that can be used to configure a Session:
* <ul><li>rtsp://xxx.xxx.xxx.xxx:8086?h264&flash=on</li>
* <li>rtsp://xxx.xxx.xxx.xxx:8086?h263&camera=front&flash=on</li>
* <li>rtsp://xxx.xxx.xxx.xxx:8086?h264=200-20-320-240</li>
* <li>rtsp://xxx.xxx.xxx.xxx:8086?aac</li></ul>
* @param uri The URI
* @throws IllegalStateException
* @throws IOException
* @return A Session configured according to the URI
*/
public static Session parse(String uri) throws IllegalStateException, IOException {
SessionBuilder builder = SessionBuilder.getInstance().clone();
byte audioApi = 0, videoApi = 0;
String query = URI.create(uri).getQuery();
String[] queryParams = query == null ? new String[0] : query.split("&");
ContentValues params = new ContentValues();
for(String param:queryParams)
{
String[] keyValue = param.split("=");
String value = "";
try {
value = keyValue[1];
}catch(ArrayIndexOutOfBoundsException e){}
params.put(
URLEncoder.encode(keyValue[0], "UTF-8"), // Name
URLEncoder.encode(value, "UTF-8") // Value
);
}
if (params.size()>0) {
builder.setAudioEncoder(AUDIO_NONE).setVideoEncoder(VIDEO_NONE);
Set<String> paramKeys=params.keySet();
// Those parameters must be parsed first or else they won't necessarily be taken into account
for(String paramName: paramKeys) {
String paramValue = params.getAsString(paramName);
// FLASH ON/OFF
if (paramName.equalsIgnoreCase("flash")) {
if (paramValue.equalsIgnoreCase("on"))
builder.setFlashEnabled(true);
else
builder.setFlashEnabled(false);
}
// CAMERA -> the client can choose between the front facing camera and the back facing camera
else if (paramName.equalsIgnoreCase("camera")) {
if (paramValue.equalsIgnoreCase("back"))
builder.setCamera(CameraInfo.CAMERA_FACING_BACK);
else if (paramValue.equalsIgnoreCase("front"))
builder.setCamera(CameraInfo.CAMERA_FACING_FRONT);
}
// MULTICAST -> the stream will be sent to a multicast group
// The default multicast address is 228.5.6.7, but the client can specify another
else if (paramName.equalsIgnoreCase("multicast")) {
if (paramValue!=null) {
try {
InetAddress addr = InetAddress.getByName(paramValue);
if (!addr.isMulticastAddress()) {
throw new IllegalStateException("Invalid multicast address !");
}
builder.setDestination(paramValue);
} catch (UnknownHostException e) {
throw new IllegalStateException("Invalid multicast address !");
}
}
else {
// Default multicast address
builder.setDestination("228.5.6.7");
}
}
// UNICAST -> the client can use this to specify where it wants the stream to be sent
else if (paramName.equalsIgnoreCase("unicast")) {
if (paramValue!=null) {
builder.setDestination(paramValue);
}
}
// VIDEOAPI -> can be used to specify what api will be used to encode video (the MediaRecorder API or the MediaCodec API)
else if (paramName.equalsIgnoreCase("videoapi")) {
if (paramValue!=null) {
if (paramValue.equalsIgnoreCase("mr")) {
videoApi = MediaStream.MODE_MEDIARECORDER_API;
} else if (paramValue.equalsIgnoreCase("mc")) {
videoApi = MediaStream.MODE_MEDIACODEC_API;
}
}
}
// AUDIOAPI -> can be used to specify what api will be used to encode audio (the MediaRecorder API or the MediaCodec API)
else if (paramName.equalsIgnoreCase("audioapi")) {
if (paramValue!=null) {
if (paramValue.equalsIgnoreCase("mr")) {
audioApi = MediaStream.MODE_MEDIARECORDER_API;
} else if (paramValue.equalsIgnoreCase("mc")) {
audioApi = MediaStream.MODE_MEDIACODEC_API;
}
}
}
// TTL -> the client can modify the time to live of packets
// By default ttl=64
else if (paramName.equalsIgnoreCase("ttl")) {
if (paramValue!=null) {
try {
int ttl = Integer.parseInt(paramValue);
if (ttl<0) throw new IllegalStateException();
builder.setTimeToLive(ttl);
} catch (Exception e) {
throw new IllegalStateException("The TTL must be a positive integer !");
}
}
}
// H.264
else if (paramName.equalsIgnoreCase("h264")) {
VideoQuality quality = VideoQuality.parseQuality(paramValue);
builder.setVideoQuality(quality).setVideoEncoder(VIDEO_H264);
}
// H.263
else if (paramName.equalsIgnoreCase("h263")) {
VideoQuality quality = VideoQuality.parseQuality(paramValue);
builder.setVideoQuality(quality).setVideoEncoder(VIDEO_H263);
}
// AMR
else if (paramName.equalsIgnoreCase("amrnb") || paramName.equalsIgnoreCase("amr")) {
AudioQuality quality = AudioQuality.parseQuality(paramValue);
builder.setAudioQuality(quality).setAudioEncoder(AUDIO_AMRNB);
}
// AAC
else if (paramName.equalsIgnoreCase("aac")) {
AudioQuality quality = AudioQuality.parseQuality(paramValue);
builder.setAudioQuality(quality).setAudioEncoder(AUDIO_AAC);
}
}
}
if (builder.getVideoEncoder()==VIDEO_NONE && builder.getAudioEncoder()==AUDIO_NONE) {
SessionBuilder b = SessionBuilder.getInstance();
builder.setVideoEncoder(b.getVideoEncoder());
builder.setAudioEncoder(b.getAudioEncoder());
}
Session session = builder.build();
if (videoApi>0 && session.getVideoTrack() != null) {
session.getVideoTrack().setStreamingMethod(videoApi);
}
if (audioApi>0 && session.getAudioTrack() != null) {
session.getAudioTrack().setStreamingMethod(audioApi);
}
return session;
}
}
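/*
 * Usage sketch (the URI below is illustrative): a query such as
 * "h264=500-20-640-480&camera=front" yields a Session streaming H.264 at
 * 500 kbps, 20 fps, 640x480 from the front-facing camera.
 *
 *   Session session = UriParser.parse("rtsp://192.168.0.10:8086?h264=500-20-640-480&camera=front");
 *
 * With an empty query, the encoders of the default SessionBuilder are used.
 */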
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.video;
import java.util.ArrayList;
import java.util.HashMap;
import android.annotation.SuppressLint;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import android.util.Log;
import android.util.SparseArray;
@SuppressLint("InlinedApi")
public class CodecManager {
public final static String TAG = "CodecManager";
public static final int[] SUPPORTED_COLOR_FORMATS = {
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
};
/**
* There currently is no way to know if an encoder is software or hardware from the MediaCodecInfo class,
* so we need to maintain a list of known software encoders.
*/
public static final String[] SOFTWARE_ENCODERS = {
"OMX.google.h264.encoder"
};
/**
* Contains a list of encoders and color formats that we may use with a {@link CodecManager.Translator}.
*/
static class Codecs {
/** A hardware encoder supporting a color format we can use. */
public String hardwareCodec;
public int hardwareColorFormat;
/** A software encoder supporting a color format we can use. */
public String softwareCodec;
public int softwareColorFormat;
}
/**
* Contains helper functions to choose an encoder and a color format.
*/
static class Selector {
private static HashMap<String,SparseArray<ArrayList<String>>> sHardwareCodecs = new HashMap<>();
private static HashMap<String,SparseArray<ArrayList<String>>> sSoftwareCodecs = new HashMap<>();
/**
* Determines the most appropriate encoder to compress the video from the Camera
*/
public static Codecs findCodecsFormMimeType(String mimeType, boolean tryColorFormatSurface) {
findSupportedColorFormats(mimeType);
SparseArray<ArrayList<String>> hardwareCodecs = sHardwareCodecs.get(mimeType);
SparseArray<ArrayList<String>> softwareCodecs = sSoftwareCodecs.get(mimeType);
Codecs list = new Codecs();
// On devices running 4.3, we need an encoder supporting the color format used to work with a Surface
if (Build.VERSION.SDK_INT>=18 && tryColorFormatSurface) {
int colorFormatSurface = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
try {
// We want a hardware encoder
list.hardwareCodec = hardwareCodecs.get(colorFormatSurface).get(0);
list.hardwareColorFormat = colorFormatSurface;
} catch (Exception e) {}
try {
// We want a software encoder
list.softwareCodec = softwareCodecs.get(colorFormatSurface).get(0);
list.softwareColorFormat = colorFormatSurface;
} catch (Exception e) {}
if (list.hardwareCodec != null) {
Log.v(TAG,"Choosen primary codec: "+list.hardwareCodec+" with color format: "+list.hardwareColorFormat);
} else {
Log.e(TAG,"No supported hardware codec found !");
}
if (list.softwareCodec != null) {
Log.v(TAG,"Choosen secondary codec: "+list.hardwareCodec+" with color format: "+list.hardwareColorFormat);
} else {
Log.e(TAG,"No supported software codec found !");
}
return list;
}
for (int i=0;i<SUPPORTED_COLOR_FORMATS.length;i++) {
try {
list.hardwareCodec = hardwareCodecs.get(SUPPORTED_COLOR_FORMATS[i]).get(0);
list.hardwareColorFormat = SUPPORTED_COLOR_FORMATS[i];
break;
} catch (Exception e) {}
}
for (int i=0;i<SUPPORTED_COLOR_FORMATS.length;i++) {
try {
list.softwareCodec = softwareCodecs.get(SUPPORTED_COLOR_FORMATS[i]).get(0);
list.softwareColorFormat = SUPPORTED_COLOR_FORMATS[i];
break;
} catch (Exception e) {}
}
if (list.hardwareCodec != null) {
Log.v(TAG,"Choosen primary codec: "+list.hardwareCodec+" with color format: "+list.hardwareColorFormat);
} else {
Log.e(TAG,"No supported hardware codec found !");
}
if (list.softwareCodec != null) {
Log.v(TAG,"Choosen secondary codec: "+list.hardwareCodec+" with color format: "+list.softwareColorFormat);
} else {
Log.e(TAG,"No supported software codec found !");
}
return list;
}
/**
* Returns an associative array of the supported color formats and the names of the encoders for a given mime type
* This can take up to a second on certain phones the first time you run it...
**/
@SuppressLint("NewApi")
static private void findSupportedColorFormats(String mimeType) {
SparseArray<ArrayList<String>> softwareCodecs = new SparseArray<ArrayList<String>>();
SparseArray<ArrayList<String>> hardwareCodecs = new SparseArray<ArrayList<String>>();
if (sSoftwareCodecs.containsKey(mimeType)) {
return;
}
Log.v(TAG,"Searching supported color formats for mime type \""+mimeType+"\"...");
// We loop through the encoders; apparently this can take up to a second (tested on a GS3)
for(int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--){
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
if (!codecInfo.isEncoder()) continue;
String[] types = codecInfo.getSupportedTypes();
for (int i = 0; i < types.length; i++) {
if (types[i].equalsIgnoreCase(mimeType)) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
boolean software = false;
for (int k=0;k<SOFTWARE_ENCODERS.length;k++) {
if (codecInfo.getName().equalsIgnoreCase(SOFTWARE_ENCODERS[k])) {
software = true;
}
}
// And through the color formats supported
for (int k = 0; k < capabilities.colorFormats.length; k++) {
int format = capabilities.colorFormats[k];
if (software) {
if (softwareCodecs.get(format) == null) softwareCodecs.put(format, new ArrayList<String>());
softwareCodecs.get(format).add(codecInfo.getName());
} else {
if (hardwareCodecs.get(format) == null) hardwareCodecs.put(format, new ArrayList<String>());
hardwareCodecs.get(format).add(codecInfo.getName());
}
}
}
}
}
// Logs the supported color formats on the phone
StringBuilder e = new StringBuilder();
e.append("Supported color formats on this phone: ");
for (int i=0;i<softwareCodecs.size();i++) e.append(softwareCodecs.keyAt(i)+", ");
for (int i=0;i<hardwareCodecs.size();i++) e.append(hardwareCodecs.keyAt(i)+(i==hardwareCodecs.size()-1?".":", "));
Log.v(TAG, e.toString());
sSoftwareCodecs.put(mimeType, softwareCodecs);
sHardwareCodecs.put(mimeType, hardwareCodecs);
return;
}
}
static class Translator {
private int mOutputColorFormat;
private int mWidth;
private int mHeight;
private int mYStride;
private int mUVStride;
private int mYSize;
private int mUVSize;
private int bufferSize;
private int i;
private byte[] tmp;
public Translator(int outputColorFormat, int width, int height) {
mOutputColorFormat = outputColorFormat;
mWidth = width;
mHeight = height;
mYStride = (int) Math.ceil(mWidth / 16.0) * 16;
mUVStride = (int) Math.ceil( (mYStride / 2) / 16.0) * 16;
mYSize = mYStride * mHeight;
mUVSize = mUVStride * mHeight / 2;
bufferSize = mYSize + mUVSize * 2;
tmp = new byte[mUVSize*2];
}
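// Worked example of the stride arithmetic above (illustrative 176x144 frame):
// mYStride   = ceil(176/16.0)*16      = 176
// mUVStride  = ceil((176/2)/16.0)*16  = 96
// mYSize     = 176*144                = 25344
// mUVSize    = 96*144/2               = 6912
// bufferSize = 25344 + 6912*2         = 39168 bytes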
public int getBufferSize() {
return bufferSize;
}
public int getUVStride() {
return mUVStride;
}
public int getYStride() {
return mYStride;
}
public byte[] translate(byte[] buffer) {
if (mOutputColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
// FIXME: There may be issues because of padding here :/
int wh4 = bufferSize/6; //wh4 = width*height/4
byte tmp;
for (i=wh4*4; i<wh4*5; i++) {
tmp = buffer[i];
buffer[i] = buffer[i+wh4];
buffer[i+wh4] = tmp;
}
}
else if (mOutputColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
// We need to interleave the U and V channel
System.arraycopy(buffer, mYSize, tmp, 0, mUVSize*2); // Y
for (i = 0; i < mUVSize; i++) {
buffer[mYSize + i*2] = tmp[i + mUVSize]; // Cb (U)
buffer[mYSize + i*2+1] = tmp[i]; // Cr (V)
}
}
return buffer;
}
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.video;
import java.io.IOException;
import net.majorkernelpanic.streaming.SessionBuilder;
import net.majorkernelpanic.streaming.rtp.H263Packetizer;
import android.graphics.ImageFormat;
import android.hardware.Camera.CameraInfo;
import android.media.MediaRecorder;
import net.majorkernelpanic.streaming.Session;
/**
* A class for streaming H.263 from the camera of an android device using RTP.
* You should use a {@link Session} instantiated with {@link SessionBuilder} instead of using this class directly.
* Call {@link #setDestinationAddress(InetAddress)}, {@link #setDestinationPorts(int)} and {@link #setVideoQuality(VideoQuality)}
* to configure the stream. You can then call {@link #start()} to start the RTP stream.
* Call {@link #stop()} to stop the stream.
*/
public class H263Stream extends VideoStream {
/**
* Constructs the H.263 stream.
* Uses CAMERA_FACING_BACK by default.
* @throws IOException
*/
public H263Stream() throws IOException {
this(CameraInfo.CAMERA_FACING_BACK);
}
/**
* Constructs the H.263 stream.
* @param cameraId Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
* @throws IOException
*/
public H263Stream(int cameraId) {
super(cameraId);
mCameraImageFormat = ImageFormat.NV21;
mVideoEncoder = MediaRecorder.VideoEncoder.H263;
mPacketizer = new H263Packetizer();
}
/**
* Starts the stream.
*/
public synchronized void start() throws IllegalStateException, IOException {
if (!mStreaming) {
configure();
super.start();
}
}
public synchronized void configure() throws IllegalStateException, IOException {
super.configure();
mMode = MODE_MEDIARECORDER_API;
mQuality = mRequestedQuality.clone();
}
/**
* Returns a description of the stream using SDP. It can then be included in an SDP file.
*/
public String getSessionDescription() {
return "m=video "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
"a=rtpmap:96 H263-1998/90000\r\n";
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.video;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import net.majorkernelpanic.streaming.SessionBuilder;
import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException;
import net.majorkernelpanic.streaming.exceptions.StorageUnavailableException;
import net.majorkernelpanic.streaming.hw.EncoderDebugger;
import net.majorkernelpanic.streaming.mp4.MP4Config;
import net.majorkernelpanic.streaming.rtp.H264Packetizer;
import android.annotation.SuppressLint;
import android.content.SharedPreferences.Editor;
import android.graphics.ImageFormat;
import android.hardware.Camera.CameraInfo;
import android.media.MediaRecorder;
import android.os.Environment;
import net.majorkernelpanic.streaming.Session;
import android.util.Base64;
import android.util.Log;
/**
* A class for streaming H.264 from the camera of an android device using RTP.
* You should use a {@link Session} instantiated with {@link SessionBuilder} instead of using this class directly.
* Call {@link #setDestinationAddress(InetAddress)}, {@link #setDestinationPorts(int)} and {@link #setVideoQuality(VideoQuality)}
* to configure the stream. You can then call {@link #start()} to start the RTP stream.
* Call {@link #stop()} to stop the stream.
*/
public class H264Stream extends VideoStream {
public final static String TAG = "H264Stream";
private Semaphore mLock = new Semaphore(0);
private MP4Config mConfig;
/**
* Constructs the H.264 stream.
* Uses CAMERA_FACING_BACK by default.
*/
public H264Stream() {
this(CameraInfo.CAMERA_FACING_BACK);
}
/**
* Constructs the H.264 stream.
* @param cameraId Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
* @throws IOException
*/
public H264Stream(int cameraId) {
super(cameraId);
mMimeType = "video/avc";
mCameraImageFormat = ImageFormat.NV21;
mVideoEncoder = MediaRecorder.VideoEncoder.H264;
mPacketizer = new H264Packetizer();
}
/**
* Returns a description of the stream using SDP. It can then be included in an SDP file.
*/
public synchronized String getSessionDescription() throws IllegalStateException {
if (mConfig == null) throw new IllegalStateException("You need to call configure() first !");
return "m=video "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
"a=rtpmap:96 H264/90000\r\n" +
"a=fmtp:96 packetization-mode=1;profile-level-id="+mConfig.getProfileLevel()+";sprop-parameter-sets="+mConfig.getB64SPS()+","+mConfig.getB64PPS()+";\r\n";
}
/**
* Starts the stream.
* This will also open the camera and display the preview if {@link #startPreview()} has not already been called.
*/
public synchronized void start() throws IllegalStateException, IOException {
if (!mStreaming) {
configure();
byte[] pps = Base64.decode(mConfig.getB64PPS(), Base64.NO_WRAP);
byte[] sps = Base64.decode(mConfig.getB64SPS(), Base64.NO_WRAP);
((H264Packetizer)mPacketizer).setStreamParameters(pps, sps);
super.start();
}
}
/**
* Configures the stream. You need to call this before calling {@link #getSessionDescription()} to apply
* your configuration of the stream.
*/
public synchronized void configure() throws IllegalStateException, IOException {
super.configure();
mMode = mRequestedMode;
mQuality = mRequestedQuality.clone();
mConfig = testH264();
}
/**
* Tests if streaming with the given configuration (bit rate, frame rate, resolution) is possible
* and determines the pps and sps. Should not be called by the UI thread.
**/
private MP4Config testH264() throws IllegalStateException, IOException {
if (mMode != MODE_MEDIARECORDER_API) return testMediaCodecAPI();
else return testMediaRecorderAPI();
}
@SuppressLint("NewApi")
private MP4Config testMediaCodecAPI() throws RuntimeException, IOException {
createCamera();
updateCamera();
try {
if (mQuality.resX>=640) {
// Using the MediaCodec API with the buffer method for high resolutions is too slow
mMode = MODE_MEDIARECORDER_API;
}
EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
return new MP4Config(debugger.getB64SPS(), debugger.getB64PPS());
} catch (Exception e) {
// Fallback on the old streaming method using the MediaRecorder API
Log.e(TAG,"Resolution not supported with the MediaCodec API, we fallback on the old streamign method.");
mMode = MODE_MEDIARECORDER_API;
return testH264();
}
}
// Should not be called by the UI thread
private MP4Config testMediaRecorderAPI() throws RuntimeException, IOException {
String key = PREF_PREFIX+"h264-mr-"+mRequestedQuality.framerate+","+mRequestedQuality.resX+","+mRequestedQuality.resY;
if (mSettings != null && mSettings.contains(key) ) {
String[] s = mSettings.getString(key, "").split(",");
return new MP4Config(s[0],s[1],s[2]);
}
if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
throw new StorageUnavailableException("No external storage or external storage not ready !");
}
final String TESTFILE = Environment.getExternalStorageDirectory().getPath()+"/spydroid-test.mp4";
Log.i(TAG,"Testing H264 support... Test file saved at: "+TESTFILE);
try {
File file = new File(TESTFILE);
file.createNewFile();
} catch (IOException e) {
throw new StorageUnavailableException(e.getMessage());
}
// Save flash state & set it to false so that led remains off while testing h264
boolean savedFlashState = mFlashEnabled;
mFlashEnabled = false;
boolean previewStarted = mPreviewStarted;
boolean cameraOpen = mCamera!=null;
createCamera();
// Stops the preview if needed
if (mPreviewStarted) {
lockCamera();
try {
mCamera.stopPreview();
} catch (Exception e) {}
mPreviewStarted = false;
}
try {
Thread.sleep(100);
} catch (InterruptedException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
unlockCamera();
try {
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setCamera(mCamera);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mMediaRecorder.setVideoEncoder(mVideoEncoder);
mMediaRecorder.setPreviewDisplay(mSurfaceView.getHolder().getSurface());
mMediaRecorder.setVideoSize(mRequestedQuality.resX,mRequestedQuality.resY);
mMediaRecorder.setVideoFrameRate(mRequestedQuality.framerate);
mMediaRecorder.setVideoEncodingBitRate((int)(mRequestedQuality.bitrate*0.8));
mMediaRecorder.setOutputFile(TESTFILE);
mMediaRecorder.setMaxDuration(3000);
// We wait a little and stop recording
mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
public void onInfo(MediaRecorder mr, int what, int extra) {
Log.d(TAG,"MediaRecorder callback called !");
if (what==MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
Log.d(TAG,"MediaRecorder: MAX_DURATION_REACHED");
} else if (what==MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
Log.d(TAG,"MediaRecorder: MAX_FILESIZE_REACHED");
} else if (what==MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
Log.d(TAG,"MediaRecorder: INFO_UNKNOWN");
} else {
Log.d(TAG,"WTF ?");
}
mLock.release();
}
});
// Start recording
mMediaRecorder.prepare();
mMediaRecorder.start();
if (mLock.tryAcquire(6,TimeUnit.SECONDS)) {
Log.d(TAG,"MediaRecorder callback was called :)");
Thread.sleep(400);
} else {
Log.d(TAG,"MediaRecorder callback was not called after 6 seconds... :(");
}
} catch (IOException e) {
throw new ConfNotSupportedException(e.getMessage());
} catch (RuntimeException e) {
throw new ConfNotSupportedException(e.getMessage());
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
try {
mMediaRecorder.stop();
} catch (Exception e) {}
mMediaRecorder.release();
mMediaRecorder = null;
lockCamera();
if (!cameraOpen) destroyCamera();
// Restore flash state
mFlashEnabled = savedFlashState;
if (previewStarted) {
// If the preview was started before the test, we try to restart it.
try {
startPreview();
} catch (Exception e) {}
}
}
// Retrieve SPS & PPS & ProfileId with MP4Config
MP4Config config = new MP4Config(TESTFILE);
// Delete dummy video
File file = new File(TESTFILE);
if (!file.delete()) Log.e(TAG,"Temp file could not be erased");
Log.i(TAG,"H264 Test succeded...");
// Save test result
if (mSettings != null) {
Editor editor = mSettings.edit();
editor.putString(key, config.getProfileLevel()+","+config.getB64SPS()+","+config.getB64PPS());
editor.commit();
}
return config;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.video;
import java.util.Iterator;
import java.util.List;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
/**
* A class that represents the quality of a video stream.
* It contains the resolution, the framerate (in fps) and the bitrate (in bps) of the stream.
*/
public class VideoQuality {
public final static String TAG = "VideoQuality";
/** Default video stream quality. */
public final static VideoQuality DEFAULT_VIDEO_QUALITY = new VideoQuality(176,144,20,500000);
/** Represents a quality for a video stream. */
public VideoQuality() {}
/**
* Represents a quality for a video stream.
* @param resX The horizontal resolution
* @param resY The vertical resolution
*/
public VideoQuality(int resX, int resY) {
this.resX = resX;
this.resY = resY;
}
/**
* Represents a quality for a video stream.
* @param resX The horizontal resolution
* @param resY The vertical resolution
* @param framerate The framerate in frame per seconds
* @param bitrate The bitrate in bit per seconds
*/
public VideoQuality(int resX, int resY, int framerate, int bitrate) {
this.framerate = framerate;
this.bitrate = bitrate;
this.resX = resX;
this.resY = resY;
}
public int framerate = 0;
public int bitrate = 0;
public int resX = 0;
public int resY = 0;
public boolean equals(VideoQuality quality) {
if (quality==null) return false;
return (quality.resX == this.resX &&
quality.resY == this.resY &&
quality.framerate == this.framerate &&
quality.bitrate == this.bitrate);
}
public VideoQuality clone() {
return new VideoQuality(resX,resY,framerate,bitrate);
}
public static VideoQuality parseQuality(String str) {
VideoQuality quality = DEFAULT_VIDEO_QUALITY.clone();
if (str != null) {
String[] config = str.split("-");
try {
quality.bitrate = Integer.parseInt(config[0])*1000; // conversion to bit/s
quality.framerate = Integer.parseInt(config[1]);
quality.resX = Integer.parseInt(config[2]);
quality.resY = Integer.parseInt(config[3]);
}
catch (IndexOutOfBoundsException ignore) {}
}
return quality;
}
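// Example: parseQuality("500-20-640-480") returns a quality of 500000 bps,
// 20 fps, 640x480.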
public String toString() {
return resX+"x"+resY+" px, "+framerate+" fps, "+bitrate/1000+" kbps";
}
/**
* Checks if the requested resolution is supported by the camera.
* If not, it replaces it with the closest supported resolution.
**/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
VideoQuality v = quality.clone();
int minDist = Integer.MAX_VALUE;
String supportedSizesStr = "Supported resolutions: ";
List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
Size size = it.next();
supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
int dist = Math.abs(quality.resX - size.width);
if (dist<minDist) {
minDist = dist;
v.resX = size.width;
v.resY = size.height;
}
}
Log.v(TAG, supportedSizesStr);
if (quality.resX != v.resX || quality.resY != v.resY) {
Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
}
return v;
}
public static int[] determineMaximumSupportedFramerate(Camera.Parameters parameters) {
int[] maxFps = new int[]{0,0};
String supportedFpsRangesStr = "Supported frame rates: ";
List<int[]> supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
for (Iterator<int[]> it = supportedFpsRanges.iterator(); it.hasNext();) {
int[] interval = it.next();
// Intervals are returned as integers, for example "29970" means "29.970" FPS.
supportedFpsRangesStr += interval[0]/1000+"-"+interval[1]/1000+"fps"+(it.hasNext()?", ":"");
if (interval[1]>maxFps[1] || (interval[0]>maxFps[0] && interval[1]==maxFps[1])) {
maxFps = interval;
}
}
Log.v(TAG,supportedFpsRangesStr);
return maxFps;
}
}
/*
* Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.majorkernelpanic.streaming.video;
import java.io.FileDescriptor;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import net.majorkernelpanic.streaming.MediaStream;
import net.majorkernelpanic.streaming.Stream;
import net.majorkernelpanic.streaming.exceptions.CameraInUseException;
import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException;
import net.majorkernelpanic.streaming.exceptions.InvalidSurfaceException;
import net.majorkernelpanic.streaming.gl.SurfaceView;
import net.majorkernelpanic.streaming.hw.EncoderDebugger;
import net.majorkernelpanic.streaming.hw.NV21Convertor;
import net.majorkernelpanic.streaming.rtp.MediaCodecInputStream;
import android.annotation.SuppressLint;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Looper;
import android.os.ParcelFileDescriptor;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
/**
* Don't use this class directly.
*/
public abstract class VideoStream extends MediaStream {
protected final static String TAG = "VideoStream";
protected VideoQuality mRequestedQuality = VideoQuality.DEFAULT_VIDEO_QUALITY.clone();
protected VideoQuality mQuality = mRequestedQuality.clone();
protected SurfaceHolder.Callback mSurfaceHolderCallback = null;
protected SurfaceView mSurfaceView = null;
protected SharedPreferences mSettings = null;
protected int mVideoEncoder, mCameraId = 0;
protected int mRequestedOrientation = 0, mOrientation = 0;
protected Camera mCamera;
protected Thread mCameraThread;
protected Looper mCameraLooper;
protected boolean mCameraOpenedManually = true;
protected boolean mFlashEnabled = false;
protected boolean mSurfaceReady = false;
protected boolean mUnlocked = false;
protected boolean mPreviewStarted = false;
protected boolean mUpdated = false;
protected String mMimeType;
protected String mEncoderName;
protected int mEncoderColorFormat;
protected int mCameraImageFormat;
protected int mMaxFps = 0;
/**
* Don't use this class directly.
* Uses CAMERA_FACING_BACK by default.
*/
public VideoStream() {
this(CameraInfo.CAMERA_FACING_BACK);
}
/**
* Don't use this class directly
* @param camera Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
*/
@SuppressLint("InlinedApi")
public VideoStream(int camera) {
super();
setCamera(camera);
}
/**
* Sets the camera that will be used to capture video.
* You can call this method at any time and changes will take effect next time you start the stream.
* @param camera Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
*/
public void setCamera(int camera) {
CameraInfo cameraInfo = new CameraInfo();
int numberOfCameras = Camera.getNumberOfCameras();
for (int i=0;i<numberOfCameras;i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == camera) {
mCameraId = i;
break;
}
}
}
/** Switch between the front facing and the back facing camera of the phone.
* If {@link #startPreview()} has been called, the preview will be briefly interrupted.
* If {@link #start()} has been called, the stream will be briefly interrupted.
* You should not call this method from the main thread if you are already streaming.
* @throws IOException
* @throws RuntimeException
**/
public void switchCamera() throws RuntimeException, IOException {
if (Camera.getNumberOfCameras() == 1) throw new IllegalStateException("Phone only has one camera !");
boolean streaming = mStreaming;
boolean previewing = mCamera!=null && mCameraOpenedManually;
mCameraId = (mCameraId == CameraInfo.CAMERA_FACING_BACK) ? CameraInfo.CAMERA_FACING_FRONT : CameraInfo.CAMERA_FACING_BACK;
setCamera(mCameraId);
stopPreview();
mFlashEnabled = false;
if (previewing) startPreview();
if (streaming) start();
}
/**
* Returns the id of the camera currently selected.
* Can be either {@link CameraInfo#CAMERA_FACING_BACK} or
* {@link CameraInfo#CAMERA_FACING_FRONT}.
*/
public int getCamera() {
return mCameraId;
}
public Camera getCameraObject() {return mCamera;}
/**
* Sets a Surface to show a preview of recorded media (video).
* You can call this method at any time and changes will take effect next time you call {@link #start()}.
*/
public synchronized void setSurfaceView(SurfaceView view) {
mSurfaceView = view;
if (mSurfaceHolderCallback != null && mSurfaceView != null && mSurfaceView.getHolder() != null) {
mSurfaceView.getHolder().removeCallback(mSurfaceHolderCallback);
}
if (mSurfaceView != null && mSurfaceView.getHolder() != null) {
mSurfaceHolderCallback = new Callback() {
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mSurfaceReady = false;
stopPreview();
Log.d(TAG,"Surface destroyed !");
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mSurfaceReady = true;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG,"Surface Changed !");
}
};
mSurfaceView.getHolder().addCallback(mSurfaceHolderCallback);
mSurfaceReady = true;
}
}
/** Turns the LED on or off if phone has one. */
public synchronized void setFlashState(boolean state) {
// If the camera has already been opened, we apply the change immediately
if (mCamera != null) {
if (mStreaming && mMode == MODE_MEDIARECORDER_API) {
lockCamera();
}
Parameters parameters = mCamera.getParameters();
// We test if the phone has a flash
if (parameters.getFlashMode()==null) {
// The phone has no flash, or the chosen camera cannot toggle the flash
throw new RuntimeException("Can't turn the flash on !");
} else {
parameters.setFlashMode(state?Parameters.FLASH_MODE_TORCH:Parameters.FLASH_MODE_OFF);
try {
mCamera.setParameters(parameters);
mFlashEnabled = state;
} catch (RuntimeException e) {
mFlashEnabled = false;
throw new RuntimeException("Can't turn the flash on !");
} finally {
if (mStreaming && mMode == MODE_MEDIARECORDER_API) {
unlockCamera();
}
}
}
} else {
mFlashEnabled = state;
}
}
/**
* Toggles the LED of the phone if it has one.
* You can get the current state of the flash with {@link VideoStream#getFlashState()}.
*/
public synchronized void toggleFlash() {
setFlashState(!mFlashEnabled);
}
/** Indicates whether or not the flash of the phone is on. */
public boolean getFlashState() {
return mFlashEnabled;
}
/**
* Sets the orientation of the preview.
* @param orientation The orientation of the preview
*/
public void setPreviewOrientation(int orientation) {
mRequestedOrientation = orientation;
mUpdated = false;
}
/**
* Sets the configuration of the stream. You can call this method at any time
* and changes will take effect next time you call {@link #configure()}.
* @param videoQuality Quality of the stream
*/
public void setVideoQuality(VideoQuality videoQuality) {
if (!mRequestedQuality.equals(videoQuality)) {
mRequestedQuality = videoQuality.clone();
mUpdated = false;
}
}
/**
* Returns the quality of the stream.
*/
public VideoQuality getVideoQuality() {
return mRequestedQuality;
}
/**
* Some data (SPS and PPS params) needs to be stored when {@link #getSessionDescription()} is called
* @param prefs The SharedPreferences that will be used to save SPS and PPS parameters
*/
public void setPreferences(SharedPreferences prefs) {
mSettings = prefs;
}
/**
* Configures the stream. You need to call this before calling {@link #getSessionDescription()}
* to apply your configuration of the stream.
*/
public synchronized void configure() throws IllegalStateException, IOException {
super.configure();
mOrientation = mRequestedOrientation;
}
/**
* Starts the stream.
* This will also open the camera and display the preview
* if {@link #startPreview()} has not already been called.
*/
public synchronized void start() throws IllegalStateException, IOException {
if (!mPreviewStarted) mCameraOpenedManually = false;
super.start();
Log.d(TAG,"Stream configuration: FPS: "+mQuality.framerate+" Width: "+mQuality.resX+" Height: "+mQuality.resY);
}
/** Stops the stream. */
public synchronized void stop() {
if (mCamera != null) {
if (mMode == MODE_MEDIACODEC_API) {
mCamera.setPreviewCallbackWithBuffer(null);
}
if (mMode == MODE_MEDIACODEC_API_2) {
((SurfaceView)mSurfaceView).removeMediaCodecSurface();
}
super.stop();
// We need to restart the preview
if (!mCameraOpenedManually) {
destroyCamera();
} else {
try {
startPreview();
} catch (RuntimeException e) {
e.printStackTrace();
}
}
}
}
public synchronized void startPreview()
throws CameraInUseException,
InvalidSurfaceException,
RuntimeException {
mCameraOpenedManually = true;
if (!mPreviewStarted) {
createCamera();
updateCamera();
}
}
/**
* Stops the preview.
*/
public synchronized void stopPreview() {
mCameraOpenedManually = false;
stop();
}
/**
* Video encoding is done by a MediaRecorder.
*/
protected void encodeWithMediaRecorder() throws IOException, ConfNotSupportedException {
Log.d(TAG,"Video encoded using the MediaRecorder API");
// We need a local socket to forward data output by the camera to the packetizer
createSockets();
// Reopens the camera if needed
destroyCamera();
createCamera();
// The camera must be unlocked before the MediaRecorder can use it
unlockCamera();
try {
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setCamera(mCamera);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mMediaRecorder.setVideoEncoder(mVideoEncoder);
mMediaRecorder.setPreviewDisplay(mSurfaceView.getHolder().getSurface());
mMediaRecorder.setVideoSize(mRequestedQuality.resX,mRequestedQuality.resY);
mMediaRecorder.setVideoFrameRate(mRequestedQuality.framerate);
// The bandwidth actually consumed is often above what was requested
mMediaRecorder.setVideoEncodingBitRate((int)(mRequestedQuality.bitrate*0.8));
// We write the output of the camera in a local socket instead of a file !
// This one little trick makes streaming feasible quite simply: data from the camera
// can then be manipulated at the other end of the socket
FileDescriptor fd = null;
if (sPipeApi == PIPE_API_PFD) {
fd = mParcelWrite.getFileDescriptor();
} else {
fd = mSender.getFileDescriptor();
}
mMediaRecorder.setOutputFile(fd);
mMediaRecorder.prepare();
mMediaRecorder.start();
} catch (Exception e) {
throw new ConfNotSupportedException(e.getMessage());
}
InputStream is = null;
if (sPipeApi == PIPE_API_PFD) {
is = new ParcelFileDescriptor.AutoCloseInputStream(mParcelRead);
} else {
is = mReceiver.getInputStream();
}
// This will skip the MPEG4 header; if this step fails we can't stream anything :(
try {
byte buffer[] = new byte[4];
// Skip all atoms preceding mdat atom
while (!Thread.interrupted()) {
while (is.read() != 'm');
is.read(buffer,0,3);
if (buffer[0] == 'd' && buffer[1] == 'a' && buffer[2] == 't') break;
}
} catch (IOException e) {
Log.e(TAG,"Couldn't skip mp4 header :/");
stop();
throw e;
}
// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
mPacketizer.setInputStream(is);
mPacketizer.start();
mStreaming = true;
}
/**
* Video encoding is done by a MediaCodec.
*/
protected void encodeWithMediaCodec() throws RuntimeException, IOException {
if (mMode == MODE_MEDIACODEC_API_2) {
// Uses the method MediaCodec.createInputSurface to feed the encoder
encodeWithMediaCodecMethod2();
} else {
// Uses dequeueInputBuffer to feed the encoder
encodeWithMediaCodecMethod1();
}
}
/**
* Video encoding is done by a MediaCodec.
*/
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {
Log.d(TAG,"Video encoded using the MediaCodec API with a buffer");
// Updates the parameters of the camera if needed
createCamera();
updateCamera();
// Estimates the frame rate of the camera
measureFramerate();
// Starts the preview if needed
if (!mPreviewStarted) {
try {
mCamera.startPreview();
mPreviewStarted = true;
} catch (RuntimeException e) {
destroyCamera();
throw e;
}
}
EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
final NV21Convertor convertor = debugger.getNV21Convertor();
mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,debugger.getEncoderColorFormat());
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
Camera.PreviewCallback callback = new Camera.PreviewCallback() {
long now = System.nanoTime()/1000, oldnow = now, i=0;
ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
oldnow = now;
now = System.nanoTime()/1000;
if (i++>3) {
i = 0;
//Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
}
try {
int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
if (bufferIndex>=0) {
inputBuffers[bufferIndex].clear();
if (data == null) Log.e(TAG,"Symptom of the \"Callback buffer was too small\" problem...");
else convertor.convert(data, inputBuffers[bufferIndex]);
mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
} else {
Log.e(TAG,"No buffer available !");
}
} finally {
mCamera.addCallbackBuffer(data);
}
}
};
for (int i=0;i<10;i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
mCamera.setPreviewCallbackWithBuffer(callback);
// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
mPacketizer.start();
mStreaming = true;
}
/**
* Video encoding is done by a MediaCodec.
* But here we will use the buffer-to-surface method
*/
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
Log.d(TAG,"Video encoded using the MediaCodec API with a surface");
// Updates the parameters of the camera if needed
createCamera();
updateCamera();
// Estimates the frame rate of the camera
measureFramerate();
EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface surface = mMediaCodec.createInputSurface();
((SurfaceView)mSurfaceView).addMediaCodecSurface(surface);
mMediaCodec.start();
// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
mPacketizer.start();
mStreaming = true;
}
/**
* Returns a description of the stream using SDP.
* This method can only be called after {@link Stream#configure()}.
* @throws IllegalStateException Thrown when {@link Stream#configure()} wa not called.
*/
public abstract String getSessionDescription() throws IllegalStateException;
/**
* Opens the camera in a new Looper thread so that the preview callback is not called from the main thread
* If an exception is thrown in this Looper thread, we bring it back into the main thread.
* @throws RuntimeException Might happen if another app is already using the camera.
*/
private void openCamera() throws RuntimeException {
final Semaphore lock = new Semaphore(0);
final RuntimeException[] exception = new RuntimeException[1];
mCameraThread = new Thread(new Runnable() {
@Override
public void run() {
Looper.prepare();
mCameraLooper = Looper.myLooper();
try {
mCamera = Camera.open(mCameraId);
} catch (RuntimeException e) {
exception[0] = e;
} finally {
lock.release();
Looper.loop();
}
}
});
mCameraThread.start();
lock.acquireUninterruptibly();
if (exception[0] != null) throw new CameraInUseException(exception[0].getMessage());
}
protected synchronized void createCamera() throws RuntimeException {
if (mSurfaceView == null)
throw new InvalidSurfaceException("Invalid surface !");
if (mSurfaceView.getHolder() == null || !mSurfaceReady)
throw new InvalidSurfaceException("Invalid surface !");
if (mCamera == null) {
openCamera();
mUpdated = false;
mUnlocked = false;
mCamera.setErrorCallback(new Camera.ErrorCallback() {
@Override
public void onError(int error, Camera camera) {
// On some phones when trying to use the camera facing front the media server will die
// Whether or not this callback may be called really depends on the phone
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// In this case the application must release the camera and instantiate a new one
Log.e(TAG,"Media server died !");
// We don't know in what thread we are so stop needs to be synchronized
mCameraOpenedManually = false;
stop();
} else {
Log.e(TAG,"Error unknown with the camera: "+error);
}
}
});
try {
// If the phone has a flash, we turn it on/off according to mFlashEnabled
// setRecordingHint(true) is a very nice optimization if you plan to use the Camera only for recording
Parameters parameters = mCamera.getParameters();
if (parameters.getFlashMode()!=null) {
parameters.setFlashMode(mFlashEnabled?Parameters.FLASH_MODE_TORCH:Parameters.FLASH_MODE_OFF);
}
parameters.setRecordingHint(true);
mCamera.setParameters(parameters);
mCamera.setDisplayOrientation(mOrientation);
try {
if (mMode == MODE_MEDIACODEC_API_2) {
mSurfaceView.startGLThread();
mCamera.setPreviewTexture(mSurfaceView.getSurfaceTexture());
} else {
mCamera.setPreviewDisplay(mSurfaceView.getHolder());
}
} catch (IOException e) {
throw new InvalidSurfaceException("Invalid surface !");
}
} catch (RuntimeException e) {
destroyCamera();
throw e;
}
}
}
protected synchronized void destroyCamera() {
if (mCamera != null) {
if (mStreaming) super.stop();
lockCamera();
mCamera.stopPreview();
try {
mCamera.release();
} catch (Exception e) {
Log.e(TAG,e.getMessage()!=null?e.getMessage():"unknown error");
}
mCamera = null;
mCameraLooper.quit();
mUnlocked = false;
mPreviewStarted = false;
}
}
protected synchronized void updateCamera() throws RuntimeException {
// The camera is already correctly configured
if (mUpdated) return;
if (mPreviewStarted) {
mPreviewStarted = false;
mCamera.stopPreview();
}
Parameters parameters = mCamera.getParameters();
mQuality = VideoQuality.determineClosestSupportedResolution(parameters, mQuality);
int[] max = VideoQuality.determineMaximumSupportedFramerate(parameters);
double ratio = (double)mQuality.resX/(double)mQuality.resY;
mSurfaceView.requestAspectRatio(ratio);
parameters.setPreviewFormat(mCameraImageFormat);
parameters.setPreviewSize(mQuality.resX, mQuality.resY);
parameters.setPreviewFpsRange(max[0], max[1]);
try {
mCamera.setParameters(parameters);
mCamera.setDisplayOrientation(mOrientation);
mCamera.startPreview();
mPreviewStarted = true;
mUpdated = true;
} catch (RuntimeException e) {
destroyCamera();
throw e;
}
}
protected void lockCamera() {
if (mUnlocked) {
Log.d(TAG,"Locking camera");
try {
mCamera.reconnect();
} catch (Exception e) {
Log.e(TAG,e.getMessage());
}
mUnlocked = false;
}
}
protected void unlockCamera() {
if (!mUnlocked) {
Log.d(TAG,"Unlocking camera");
try {
mCamera.unlock();
} catch (Exception e) {
Log.e(TAG,e.getMessage());
}
mUnlocked = true;
}
}
/**
* Computes the average frame rate at which the preview callback is called.
* We will then use this average frame rate with the MediaCodec.
* Blocks the thread in which this function is called.
*/
private void measureFramerate() {
final Semaphore lock = new Semaphore(0);
final Camera.PreviewCallback callback = new Camera.PreviewCallback() {
int i = 0, t = 0;
long now, oldnow, count = 0;
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
i++;
now = System.nanoTime()/1000;
if (i>3) {
t += now - oldnow;
count++;
}
if (i>20) {
mQuality.framerate = (int) (1000000/(t/count)+1);
lock.release();
}
oldnow = now;
}
};
mCamera.setPreviewCallback(callback);
try {
lock.tryAcquire(2,TimeUnit.SECONDS);
Log.d(TAG,"Actual framerate: "+mQuality.framerate);
if (mSettings != null) {
Editor editor = mSettings.edit();
editor.putInt(PREF_PREFIX+"fps"+mRequestedQuality.framerate+","+mCameraImageFormat+","+mRequestedQuality.resX+mRequestedQuality.resY, mQuality.framerate);
editor.commit();
}
} catch (InterruptedException e) {}
mCamera.setPreviewCallback(null);
}
}
## This file is automatically generated by Android Studio.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file should *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
sdk.dir=/Users/gwonjoohee/Library/Android/sdk
rootProject.name='CCTedV'
include ':app'
include ':libstreaming'
# CCTedV
- CCTV camera application (records and uploads frame data)
- After downloading, the code can be run in Android Studio
# whatsUP
- Notification application
- After downloading, the code can be run in Android Studio
# file_server
Server that receives frame data from the Android app.
cd file_server
- python3 -m venv env  # create a virtual environment
- source env/bin/activate  # activate the virtual environment
- pip install django
- pip install djangorestframework
- python manage.py runserver 0.0.0.0:5900  # start the Django project on port 5900
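A minimal sketch of a client call against the upload endpoint once the server is running (Python; the field names come from FileUploadView in this repo, while the /file/ mount point, host, and all values are assumptions, since the project-level urls.py is not shown here):

import requests

# Hypothetical single-frame upload; befEncoding carries the base64 frame data.
payload = {
    'befEncoding': '<base64-encoded frame>',
    'userId': 'testUser',
    'timeStamp': '2020-06-13_14:33:00',
}
response = requests.post('http://localhost:5900/file/', data=payload)
print(response.text)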
from django.contrib import admin
# Register your models here.
from .models import Notification
admin.site.register(Notification)
from django.apps import AppConfig
class DetectionConfig(AppConfig):
name = 'detection'
# Generated by Django 3.0.6 on 2020-06-13 14:33
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Notification',
fields=[
('created_at', models.DateTimeField(auto_created=True)),
('noti_id', models.AutoField(primary_key=True, serialize=False)),
('noti_type', models.CharField(max_length=50)),
('user', models.CharField(max_length=1000)),
],
),
]
from django.db import models
# Create your models here.
class Notification(models.Model):
noti_id = models.AutoField(primary_key=True)
noti_type = models.CharField(max_length=50)
user = models.CharField(max_length=1000)
created_at = models.DateTimeField(auto_created=True)
from django.test import TestCase
# Create your tests here.
from django.urls import path
from .views import *
urlpatterns = [
path('', NotificationView.as_view()),
]
from rest_framework.views import APIView
from django.http import HttpResponse
import requests
import json
import firebase_admin
from firebase_admin import credentials
from firebase_admin import messaging
import datetime
from django.apps import apps
User = apps.get_model('user', 'User')
cred = credentials.Certificate('./detection/whatsup-ad0b7-firebase-adminsdk-6yhd1-2e4fcd728a.json')
default_app = firebase_admin.initialize_app(cred)
topic = 'detection'
"""
NotificationView exposes the API that Spark calls when a fire_broken or
unknown_person event occurs. Its post handler looks up the unique token of
each user's notification device and sends the push notification.
"""
class NotificationView(APIView):
def post(self, request, *args, **kwargs):
if request.method == 'POST':
timestamp = request.POST.get('timestamp', False)
userId = request.POST.get('userId', False)
detectionType = request.POST.get('detectionType',False)
print("USER : ", userId)
print("timestamp : ", timestamp)
print("detectionType : ", detectionType)
user = User.objects.get(userId=userId)
bodyContent = ""
if detectionType == "fire_broken":
bodyContent = "불났어요 불났어요!! " + timestamp  # "Fire! Fire!!"
elif detectionType == "unknown_person":
bodyContent = "침입자 발생!! " + timestamp  # "Intruder detected!!"
message = messaging.Message(
android=messaging.AndroidConfig(
ttl=datetime.timedelta(seconds=3600),
priority='normal',
notification=messaging.AndroidNotification(
title='삐뽀삐뽀',  # "wee-woo" (siren sound)
body = bodyContent,
icon='',
color='#f45342',
sound='default'
),
),
data={
'timestamp': timestamp
},
webpush=messaging.WebpushConfig(
notification=messaging.WebpushNotification(
title='웹 알림',  # "Web notification"
body='여긴 어떨까',  # "How about here?"
icon='',
),
),
# topic=topic
token=user.userToken
)
response = messaging.send(message)
# Response is a message ID string.
print('Successfully sent message:', response)
return HttpResponse('notification_success')
return HttpResponse('/notification_failure')
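# A sketch of the request Spark is expected to send to this view (illustrative
# values; the /notification/ mount point is an assumption, since the
# project-level urls.py is not shown here):
#
#   curl -X POST http://localhost:5900/notification/ \
#        -d userId=testUser \
#        -d detectionType=fire_broken \
#        -d "timestamp=2020-06-13 14:33:00"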
from django.contrib import admin
from .models import File
# Register your models here.
admin.site.register(File)
from django.apps import AppConfig
class FileConfig(AppConfig):
name = 'file'
from django import forms
from .models import File
class UploadFileForm(forms.ModelForm):
class Meta:
model = File
fields = ('userName', 'file')
# Generated by Django 3.0.6 on 2020-05-08 08:19
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='UploadFileModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fileName', models.TextField(default='')),
('file', models.FileField(null=True, upload_to='')),
],
),
]
# Generated by Django 3.0.6 on 2020-05-08 08:44
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('file', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='File',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('file', models.FileField(upload_to='')),
('name', models.CharField(max_length=100)),
('version', models.IntegerField(default=0)),
('upload_date', models.DateTimeField(auto_now=True, db_index=True)),
],
),
migrations.DeleteModel(
name='UploadFileModel',
),
]
# Generated by Django 3.0.6 on 2020-05-08 09:38
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('file', '0002_auto_20200508_0844'),
]
operations = [
migrations.RenameField(
model_name='file',
old_name='name',
new_name='userName',
),
migrations.RemoveField(
model_name='file',
name='version',
),
]
# Generated by Django 3.0.6 on 2020-05-08 10:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('file', '0003_auto_20200508_0938'),
]
operations = [
migrations.AlterField(
model_name='file',
name='file',
field=models.FileField(upload_to=models.CharField(max_length=100)),
),
]
# Generated by Django 3.0.6 on 2020-05-08 10:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('file', '0004_auto_20200508_1005'),
]
operations = [
migrations.AlterField(
model_name='file',
name='file',
field=models.FileField(upload_to={models.CharField(max_length=100)}),
),
]
# Generated by Django 3.0.6 on 2020-05-08 10:13
from django.db import migrations, models
import file.models
class Migration(migrations.Migration):
dependencies = [
('file', '0005_auto_20200508_1009'),
]
operations = [
migrations.AlterField(
model_name='file',
name='file',
field=models.FileField(upload_to=file.models.File.user_directory_path),
),
]
from django.db import models
class File(models.Model):
def user_directory_path(instance, filename):
print("INSTANCE", instance)
# The file will be uploaded to MEDIA_ROOT/<userName>/<filename>.
return '{0}/{1}'.format(instance.userName, filename)
userName = models.CharField(max_length=100) # owner of the file; used as the upload subdirectory
upload_date = models.DateTimeField(auto_now=True, db_index=True)
file = models.FileField(upload_to=user_directory_path)
def __str__(self):
return self.userName
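# Example of how user_directory_path resolves (illustrative values): a File whose
# userName is "victoria" receiving "victoria_20200505.json" ends up stored under
# MEDIA_ROOT as "victoria/victoria_20200505.json".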
import os
from .models import File
from rest_framework import serializers
class FileUploaderSerializer(serializers.ModelSerializer):
class Meta:
model=File
fields='__all__'
# read_only_fields = '__all__'
def validate(self, validated_data):
# The model field is 'userName' ('name' was renamed in migration 0003).
validated_data['userName'] = os.path.splitext(validated_data['file'].name)[0]
return validated_data
def create(self, validated_data):
return File.objects.create(**validated_data)
from django.test import TestCase
# Create your tests here.
# from django.urls import path, include
# from rest_framework.routers import DefaultRouter
# from file import views
#
# # The API URLs are now determined automatically by the router.
# urlpatterns = [
# path('file', views.upload_file, name='upload_file'),
# ]
from django.urls import path
from .views import *
urlpatterns = [
path('', FileUploadView.as_view()),
]
from rest_framework.parsers import FileUploadParser
from rest_framework.views import APIView
from .forms import UploadFileForm
from django.http import HttpResponse
import base64
import json
import os
from kafka import KafkaProducer
from kafka.errors import KafkaError
"""
File Upload View는 전송받은 프레임 데이터를 카프카에 프로듀스하는 클래스입니다.
post api를 안드로이드에서 호출하면 해당 데이터를 jsong Object를 str으로 dumps하여 프로듀스 진행합니다.
"""
class FileUploadView(APIView):
parser_classes = (FileUploadParser,)
tog = True
def post(self, request, *args, **kwargs):
if request.method == 'POST':
print("---- data in ----")
befEncoding = request.POST['befEncoding']
userId = request.POST['userId']
timeStamp = request.POST['timeStamp']
dict_data = {'data': befEncoding, 'userId': userId, 'timestamp': timeStamp}
if not os.path.exists('./media/'+userId):
os.makedirs('./media/'+userId)
with open("media/"+userId+"/"+userId+"_"+timeStamp+".json", "w") as file:
json.dump(dict_data, file, indent="\t")
producer = KafkaProducer(bootstrap_servers=['1.201.142.81:9092'], max_request_size=209717600)
jsonObject = json.dumps(dict_data).encode('utf-8')
# tog is a class attribute, so flip it on the class itself (not via `global`).
FileUploadView.tog = self.toggle(FileUploadView.tog)
# print(type(FileUploadView.tog))
# if FileUploadView.tog == True:
# future = producer.send('test4', jsonObject)
# elif FileUploadView.tog == False:
# future = producer.send('test98', jsonObject)
future = producer.send('test4', jsonObject)
try:
    record_metadata = future.get(timeout=10)
    # A successful result returns the assigned topic, partition and offset.
    print("TOPIC : ", record_metadata.topic)
    print("Partition :", record_metadata.partition)
    print("Offset : ", record_metadata.offset)
except KafkaError as err:
    # The produce request failed; record_metadata is unset, so only log the error.
    print(err)
print("---- process exit ----")
return HttpResponse('save_success')
else:
form = UploadFileForm()
# return render(request, 'upload.html', {'form': form})
return HttpResponse('/upload_failure')
def toggle(self, tog):
return not tog
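# A minimal sketch of the consuming side of the pipeline above, assuming the same
# broker address and 'test4' topic used by the producer; the group id is made up
# for illustration.
import json
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'test4',
    bootstrap_servers=['1.201.142.81:9092'],
    group_id='frame-readers',  # hypothetical consumer group
    value_deserializer=lambda m: json.loads(m.decode('utf-8')),
)
for record in consumer:
    frame = record.value  # {'data': ..., 'userId': ..., 'timestamp': ...}
    print(frame['userId'], frame['timestamp'])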
"""
Django settings for file_server project.
Generated by 'django-admin startproject' using Django 2.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y-yt*2es6821o$-5*d01epjpv2jb^^h@uo58or!=%0ijepzaww'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['victoria.khunet.net', 'localhost', '127.0.0.1', '1.201.143.22']
DATA_UPLOAD_MAX_MEMORY_SIZE = 30000000
FILE_UPLOAD_MAX_MEMORY_SIZE = 30000000
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'file.apps.FileConfig',
'detection.apps.DetectionConfig',
'user.apps.UserConfig',
'corsheaders',
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',  # must come before CommonMiddleware
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'file_server.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'file_server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
STATIC_URL = '/static/'
CORS_ALLOW_METHODS = [
'DELETE',
'GET',
'OPTIONS',
'PATCH',
'POST',
'PUT',
]
CORS_ORIGIN_ALLOW_ALL = True # If this is used then `CORS_ORIGIN_WHITELIST` will not have any effect
CORS_ALLOW_CREDENTIALS = True
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('upload', include('file.urls')),
path('notificate', include('detection.urls')),
path('user', include('user.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
"""
WSGI config for file_server project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'file_server.settings')
application = get_wsgi_application()
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'file_server.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
{
"data": "bef",
"userId": "victoria",
"timestamp": "20200505"
}
from django.contrib import admin
from .models import User
admin.site.register(User)
from django.apps import AppConfig
class UserConfig(AppConfig):
name = 'user'
# Generated by Django 3.0.6 on 2020-06-02 07:33
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='File',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('userId', models.CharField(max_length=100)),
('userToken', models.CharField(max_length=500)),
],
),
]
# Generated by Django 3.0.6 on 2020-06-02 07:44
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('userId', models.CharField(max_length=100, primary_key=True, serialize=False)),
('userToken', models.CharField(max_length=500)),
],
),
migrations.DeleteModel(
name='File',
),
]
from django.db import models
class User(models.Model):
userId = models.CharField(max_length=100, primary_key=True) # id shared with the Android apps
userToken = models.CharField(max_length=500)
from django.test import TestCase
# Create your tests here.
from django.urls import path
from .views import *
urlpatterns = [
path('', UserView.as_view()),
]
from django.shortcuts import render
from rest_framework.views import APIView
from django.http import HttpResponse
from django.db import models
from .models import *
from annoying.functions import get_object_or_None
# Create your views here.
"""
User View는 사용자별 id와 그 사용자의 알림용 device의 고유 토큰을 세팅할 수 있는 view입니다.
"""
class UserView(APIView):
def post(self, request, *args, **kwargs):
if request.method == 'POST':
userId = request.POST['userId']
userToken = request.POST['userToken']
print("ALL USER : ", User.objects.all())
user = get_object_or_None(User, userId=userId)
print("user : ", user)
if user is not None:
print(user)
if userToken != '': # store the device token on the existing user
user.userToken = userToken
user.save()
return HttpResponse('token_enroll_success')
return HttpResponse('/user_enroll_failure')
user = User(userId=userId)
user.save()
return HttpResponse('userId_enroll_success')
return HttpResponse('/user_enroll_failure')
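# A minimal sketch of enrolling a user and then attaching a device token,
# matching the two branches above (host and port are the ones the Android apps
# use; 'fcm-token' stands in for a real FCM token):
import requests

url = 'http://victoria.khunet.net:5900/user'
requests.post(url, data={'userId': 'victoria', 'userToken': ''})           # first call creates the user
requests.post(url, data={'userId': 'victoria', 'userToken': 'fcm-token'})  # second call stores the token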
# whatsUp
whatsUp is the alarm (notification) companion app for CCTedV.
apply plugin: 'com.android.application'
android {
compileSdkVersion 28
defaultConfig {
applicationId "com.example.whatsup"
minSdkVersion 22
targetSdkVersion 28
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'com.google.android.material:material:1.1.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
implementation 'androidx.navigation:navigation-fragment:2.0.0'
implementation 'androidx.navigation:navigation-ui:2.0.0'
implementation 'androidx.lifecycle:lifecycle-extensions:2.0.0'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
implementation 'com.google.firebase:firebase-core:17.0.0'
implementation 'com.google.firebase:firebase-messaging:20.0.0'
implementation("com.squareup.okhttp3:okhttp:4.6.0")
}
apply plugin: 'com.google.gms.google-services'
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
package com.example.whatsup;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.example.whatsup", appContext.getPackageName());
}
}
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.whatsup">
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme"
android:usesCleartextTraffic="true">
<activity
android:name=".MainActivity"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<service
android:name=".MyFirebaseMessagingService"
android:enabled="true"
android:exported="false">
<intent-filter>
<action android:name="com.google.firebase.MESSAGING_EVENT" />
<action android:name="com.google.firebase.INSTANCE_ID_EVENT"/>
</intent-filter>
</service>
<!-- Set custom default icon. This is used when no icon is set for incoming notification messages.
See README(https://goo.gl/l4GJaQ) for more. -->
<meta-data
android:name="com.google.firebase.messaging.default_notification_icon"
android:resource="@drawable/ic_notifications_black_24dp" />
<meta-data
android:name="com.google.firebase.messaging.default_notification_color"
android:resource="@color/colorAccent" />
<meta-data
android:name="com.google.firebase.messaging.default_notification_channel_id"
android:value="@string/default_notification_channel_id"/>
</application>
<uses-permission android:name="android.permission.INTERNET" />
</manifest>
package com.example.whatsup;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.material.bottomnavigation.BottomNavigationView;
import com.google.firebase.FirebaseApp;
import com.google.firebase.iid.FirebaseInstanceId;
import com.google.firebase.iid.InstanceIdResult;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.navigation.NavController;
import androidx.navigation.Navigation;
import androidx.navigation.ui.AppBarConfiguration;
import androidx.navigation.ui.NavigationUI;
public class MainActivity extends AppCompatActivity {
/*
* This is the notification app.
* Tapping btn_1 stores this device's unique FCM token in the server database.
* */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
BottomNavigationView navView = findViewById(R.id.nav_view);
AppBarConfiguration appBarConfiguration = new AppBarConfiguration.Builder(
R.id.navigation_home, R.id.navigation_dashboard, R.id.navigation_notifications)
.build();
NavController navController = Navigation.findNavController(this, R.id.nav_host_fragment);
NavigationUI.setupActionBarWithNavController(this, navController, appBarConfiguration);
NavigationUI.setupWithNavController(navView, navController);
final EditText mEdit;
FirebaseApp.initializeApp(this);
mEdit = (EditText)findViewById(R.id.userId);
FirebaseInstanceId.getInstance().getInstanceId().addOnSuccessListener(this,
new OnSuccessListener<InstanceIdResult>() {
@Override
public void onSuccess(InstanceIdResult instanceIdResult) {
String newToken = instanceIdResult.getToken();
Log.d("Token", "새토큰" + newToken );
}
}
);
// When the button is tapped, fetch the stored token and register it with the server.
Button btn_1 = findViewById(R.id.button);
btn_1.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
String savedToken = FirebaseInstanceId.getInstance().getToken();
Singleton.getInstance().setUserId(mEdit.getText().toString());
Singleton.getInstance().setUserToken(savedToken);
String url = "http://victoria.khunet.net:5900/user";
final AsyncTask<Void, Void, String> execute = new NetworkTask(url).execute();
Log.d("Button", "등록되어 있는 토큰ID:" + savedToken);
}
});
}
}
package com.example.whatsup;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.app.TaskStackBuilder;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.os.IBinder;
import android.util.Log;
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;
import androidx.core.app.NotificationCompat;
public class MyFirebaseMessagingService extends FirebaseMessagingService {
private static final String TAG = "FCM";
/*
* Service layer that receives Firebase notifications on the user's device.
* */
public MyFirebaseMessagingService() {
}
// Called when a new token is issued.
@Override
public void onNewToken(String token) {
super.onNewToken(token);
Singleton.getInstance().setUserToken(token);
// Log the token.
Log.e(TAG, "onNewToken called: " + token);
}
// Called when a new message is received.
@Override
public void onMessageReceived(RemoteMessage remoteMessage) {
super.onMessageReceived(remoteMessage);
Log.i("hello", "??");
// For now, just extract and log the message contents. (Messages sent from the Firebase console may carry an empty data payload.)
String from = remoteMessage.getFrom();
Log.d(TAG,
"title:" + remoteMessage.getNotification().getTitle()
+ ", body:" + remoteMessage.getNotification().getBody()
+ ", data:" + remoteMessage.getData()
);
// Forward the message to the activity.
sendToActivity(
getApplicationContext()
, remoteMessage.getFrom()
, remoteMessage.getNotification().getTitle()
, remoteMessage.getNotification().getBody()
, remoteMessage.getData().toString()
);
sendNotification(remoteMessage.getNotification().getTitle(), remoteMessage.getNotification().getBody());
}
// Delivers the message payload to ResultActivity via an explicit intent.
private void sendToActivity(Context context, String from, String title, String body, String contents ){
Intent intent = new Intent(context, ResultActivity.class);
intent.putExtra("from", from);
intent.putExtra("title", title);
intent.putExtra("body", body);
intent.putExtra("contents", contents);
intent.addFlags(
Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP
);
Log.i(TAG, contents);
context.startActivity(intent);
}
private void sendNotification(String tit, String body) {
String title = tit;
String message = body;
/**
* From Android O (Oreo) onward, a notification is not shown unless it is posted to a NotificationChannel.
* **/
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
String channel = "What's Up";
String channel_nm = "fire_detection";
NotificationManager notichannel = (android.app.NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
NotificationChannel channelMessage = new NotificationChannel(channel, channel_nm,
android.app.NotificationManager.IMPORTANCE_DEFAULT);
channelMessage.setDescription("alarm for your cctv");
channelMessage.enableLights(true);
channelMessage.enableVibration(true);
channelMessage.setShowBadge(false);
channelMessage.setVibrationPattern(new long[]{100, 200, 100, 200});
notichannel.createNotificationChannel(channelMessage);
NotificationCompat.Builder notificationBuilder =
new NotificationCompat.Builder(this, channel)
.setSmallIcon(R.drawable.ic_notifications_black_24dp)
.setContentTitle(title)
.setContentText(message)
.setChannelId(channel)
.setAutoCancel(true)
.setDefaults(Notification.DEFAULT_SOUND | Notification.DEFAULT_VIBRATE);
NotificationManager notificationManager =
(NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.notify(9999, notificationBuilder.build());
} else {
Intent resultIntent = new Intent(this, ResultActivity.class);
TaskStackBuilder stackBuilder = TaskStackBuilder.create(this);
stackBuilder.addParentStack( MainActivity.class );
stackBuilder.addNextIntent(resultIntent);
PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_UPDATE_CURRENT);
NotificationCompat.Builder notificationBuilder =
new NotificationCompat.Builder(this, "")
.setSmallIcon(R.drawable.ic_notifications_black_24dp)
.setContentTitle(title)
.setContentText(message)
.setAutoCancel(true)
.setDefaults(Notification.DEFAULT_SOUND | Notification.DEFAULT_VIBRATE)
.setContentIntent(resultPendingIntent);
NotificationManager notificationManager =
(NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.notify(9999, notificationBuilder.build());
}
}
}
package com.example.whatsup;
import android.content.ContentValues;
import android.os.AsyncTask;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import okhttp3.MultipartBody;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
public class NetworkTask extends AsyncTask<Void, Void, String> {
private String url;
/*
* NetworkTask performs HTTP API calls in the background.
* Here it is used to store a user's unique device token on the server.
* */
public NetworkTask(String url) {
this.url = url;
}
@Override
protected String doInBackground(Void... params) {
RequestBody requestBody = new MultipartBody.Builder().setType(MultipartBody.FORM)
.addFormDataPart("userId", Singleton.getInstance().getUserId())
.addFormDataPart("userToken", Singleton.getInstance().getUserToken())
.build();
OkHttpClient client = new OkHttpClient();
Request request = new Request.Builder().url(url).post(requestBody).build();
Response response = null;
try {
response = client.newCall(request).execute();
} catch (IOException e) {
e.printStackTrace();
}
if (response != null)
Log.i("RES", response.toString());
return "hello";
}
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
if(s != null)
Log.i("RESPONSE : ", s);
}
@Override
protected void onPreExecute() {
}
}
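// A minimal usage sketch (the URL is the one MainActivity already uses; as an
// AsyncTask, execute() must be called from the main thread):
// new NetworkTask("http://victoria.khunet.net:5900/user").execute();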
package com.example.whatsup;
public class Singleton {
/*
* This Singleton allocates its memory once (statically) when the application
* starts and reuses that single instance afterwards.
* It is used to store and access the user info.
* */
private String userId;
private String userToken;
public String getUserId() {
return userId;
}
public void setUserId(String data) {
this.userId = data;
}
public void setUserToken(String data) {
this.userToken = data;
}
public String getUserToken() {
return userToken;
}
private static Singleton instance = null;
public static synchronized Singleton getInstance(){
if(null == instance){
instance = new Singleton();
}
return instance;
}
}
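// A minimal usage sketch of the singleton, mirroring how MainActivity and
// MyFirebaseMessagingService use it ("victoria" is a sample id, not fixed):
// Singleton.getInstance().setUserId("victoria");
// Singleton.getInstance().setUserToken(token);
// String userId = Singleton.getInstance().getUserId();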
package com.example.whatsup.ui.dashboard;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.Observer;
import androidx.lifecycle.ViewModelProviders;
import com.example.whatsup.R;
public class DashboardFragment extends Fragment {
private DashboardViewModel dashboardViewModel;
public View onCreateView(@NonNull LayoutInflater inflater,
ViewGroup container, Bundle savedInstanceState) {
dashboardViewModel =
ViewModelProviders.of(this).get(DashboardViewModel.class);
View root = inflater.inflate(R.layout.fragment_dashboard, container, false);
final TextView textView = root.findViewById(R.id.text_dashboard);
dashboardViewModel.getText().observe(getViewLifecycleOwner(), new Observer<String>() {
@Override
public void onChanged(@Nullable String s) {
textView.setText(s);
}
});
return root;
}
}
package com.example.whatsup.ui.dashboard;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import androidx.lifecycle.ViewModel;
public class DashboardViewModel extends ViewModel {
private MutableLiveData<String> mText;
public DashboardViewModel() {
mText = new MutableLiveData<>();
mText.setValue("This is dashboard fragment");
}
public LiveData<String> getText() {
return mText;
}
}
package com.example.whatsup.ui.home;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.Observer;
import androidx.lifecycle.ViewModelProviders;
import com.example.whatsup.R;
public class HomeFragment extends Fragment {
private HomeViewModel homeViewModel;
public View onCreateView(@NonNull LayoutInflater inflater,
ViewGroup container, Bundle savedInstanceState) {
homeViewModel =
ViewModelProviders.of(this).get(HomeViewModel.class);
View root = inflater.inflate(R.layout.fragment_home, container, false);
final TextView textView = root.findViewById(R.id.text_home);
homeViewModel.getText().observe(getViewLifecycleOwner(), new Observer<String>() {
@Override
public void onChanged(@Nullable String s) {
textView.setText(s);
}
});
return root;
}
}
package com.example.whatsup.ui.home;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import androidx.lifecycle.ViewModel;
public class HomeViewModel extends ViewModel {
private MutableLiveData<String> mText;
public HomeViewModel() {
mText = new MutableLiveData<>();
mText.setValue("This is home fragment");
}
public LiveData<String> getText() {
return mText;
}
}
package com.example.whatsup.ui.notifications;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.Observer;
import androidx.lifecycle.ViewModelProviders;
import com.example.whatsup.R;
public class NotificationsFragment extends Fragment {
private NotificationsViewModel notificationsViewModel;
public View onCreateView(@NonNull LayoutInflater inflater,
ViewGroup container, Bundle savedInstanceState) {
notificationsViewModel =
ViewModelProviders.of(this).get(NotificationsViewModel.class);
View root = inflater.inflate(R.layout.fragment_notifications, container, false);
final TextView textView = root.findViewById(R.id.text_notifications);
notificationsViewModel.getText().observe(getViewLifecycleOwner(), new Observer<String>() {
@Override
public void onChanged(@Nullable String s) {
textView.setText(s);
}
});
return root;
}
}
package com.example.whatsup.ui.notifications;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import androidx.lifecycle.ViewModel;
public class NotificationsViewModel extends ViewModel {
private MutableLiveData<String> mText;
public NotificationsViewModel() {
mText = new MutableLiveData<>();
mText.setValue("This is notifications fragment");
}
public LiveData<String> getText() {
return mText;
}
}
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24.0"
android:viewportHeight="24.0">
<path
android:fillColor="#FF000000"
android:pathData="M3,13h8L11,3L3,3v10zM3,21h8v-6L3,15v6zM13,21h8L21,11h-8v10zM13,3v6h8L21,3h-8z" />
</vector>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24.0"
android:viewportHeight="24.0">
<path
android:fillColor="#FF000000"
android:pathData="M10,20v-6h4v6h5v-8h3L12,3 2,12h3v8z" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24.0"
android:viewportHeight="24.0">
<path
android:fillColor="#FF000000"
android:pathData="M12,22c1.1,0 2,-0.9 2,-2h-4c0,1.1 0.89,2 2,2zM18,16v-5c0,-3.07 -1.64,-5.64 -4.5,-6.32L13.5,4c0,-0.83 -0.67,-1.5 -1.5,-1.5s-1.5,0.67 -1.5,1.5v0.68C7.63,5.36 6,7.92 6,11v5l-2,2v1h16v-1l-2,-2z" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingTop="?attr/actionBarSize">
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/rl"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:padding="10dp"
tools:context=".MainActivity"
android:background="#c6cabd"
>
<EditText android:id="@+id/userId" android:width="220px"
android:layout_height="400px"
android:layout_width="match_parent" />
<Button
android:id="@+id/button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:backgroundTint="#F57F17"
android:text= "setToken"
app:fabSize="auto"
app:tint="@android:color/white" />
</RelativeLayout>
<com.google.android.material.bottomnavigation.BottomNavigationView
android:id="@+id/nav_view"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_marginStart="0dp"
android:layout_marginEnd="0dp"
android:background="?android:attr/windowBackground"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:menu="@menu/bottom_nav_menu" />
<fragment
android:id="@+id/nav_host_fragment"
android:name="androidx.navigation.fragment.NavHostFragment"
android:layout_width="match_parent"
android:layout_height="match_parent"
app:defaultNavHost="true"
app:layout_constraintBottom_toTopOf="@id/nav_view"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:navGraph="@navigation/mobile_navigation" />
</androidx.constraintlayout.widget.ConstraintLayout>
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".ui.dashboard.DashboardFragment">
<TextView
android:id="@+id/text_dashboard"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginStart="8dp"
android:layout_marginTop="8dp"
android:layout_marginEnd="8dp"
android:textAlignment="center"
android:textSize="20sp"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".ui.home.HomeFragment">
<TextView
android:id="@+id/text_home"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginStart="8dp"
android:layout_marginTop="8dp"
android:layout_marginEnd="8dp"
android:textAlignment="center"
android:textSize="20sp"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".ui.notifications.NotificationsFragment">
<TextView
android:id="@+id/text_notifications"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginStart="8dp"
android:layout_marginTop="8dp"
android:layout_marginEnd="8dp"
android:textAlignment="center"
android:textSize="20sp"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>
<?xml version="1.0" encoding="utf-8"?>
<menu xmlns:android="http://schemas.android.com/apk/res/android">
<item
android:id="@+id/navigation_home"
android:icon="@drawable/ic_home_black_24dp"
android:title="@string/title_home" />
<item
android:id="@+id/navigation_dashboard"
android:icon="@drawable/ic_dashboard_black_24dp"
android:title="@string/title_dashboard" />
<item
android:id="@+id/navigation_notifications"
android:icon="@drawable/ic_notifications_black_24dp"
android:title="@string/title_notifications" />
</menu>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<navigation xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/mobile_navigation"
app:startDestination="@+id/navigation_home">
<fragment
android:id="@+id/navigation_home"
android:name="com.example.whatsup.ui.home.HomeFragment"
android:label="@string/title_home"
tools:layout="@layout/fragment_home" />
<fragment
android:id="@+id/navigation_dashboard"
android:name="com.example.whatsup.ui.dashboard.DashboardFragment"
android:label="@string/title_dashboard"
tools:layout="@layout/fragment_dashboard" />
<fragment
android:id="@+id/navigation_notifications"
android:name="com.example.whatsup.ui.notifications.NotificationsFragment"
android:label="@string/title_notifications"
tools:layout="@layout/fragment_notifications" />
</navigation>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#6200EE</color>
<color name="colorPrimaryDark">#3700B3</color>
<color name="colorAccent">#03DAC5</color>
</resources>
<resources>
<!-- Default screen margins, per the Android Design guidelines. -->
<dimen name="activity_horizontal_margin">16dp</dimen>
<dimen name="activity_vertical_margin">16dp</dimen>
</resources>
<resources>
<string name="app_name">whatsUP</string>
<string name="title_home">Home</string>
<string name="title_dashboard">Dashboard</string>
<string name="title_notifications">Notifications</string>
<string name="default_notification_channel_id" translatable="false">fcm_default_channel</string>
<string name="default_notification_channel_name" translatable="true">Test</string>
</resources>
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>
package com.example.whatsup;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() {
assertEquals(4, 2 + 2);
}
}
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.6.3'
classpath 'com.google.gms:google-services:4.2.0' // Google Services plugin
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
#Tue May 12 17:50:08 KST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
## This file is automatically generated by Android Studio.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file should *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
sdk.dir=/Users/gwonjoohee/Library/Android/sdk
rootProject.name='whatsUP'
include ':app'