권주희

Add file_server, CCTedV, and whatsUp code to the project

Showing 464 changed files with 13636 additions and 0 deletions
apply plugin: 'com.android.application'

android {
    compileSdkVersion 28

    defaultConfig {
        applicationId "com.example.cctedv"
        minSdkVersion 22
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"

        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    // Drop duplicate license/notice entries contributed by multiple libraries,
    // which would otherwise fail the APK packaging step.
    packagingOptions {
        exclude 'META-INF/DEPENDENCIES'
        exclude 'META-INF/LICENSE'
        exclude 'META-INF/LICENSE.txt'
        exclude 'META-INF/license.txt'
        exclude 'META-INF/NOTICE'
        exclude 'META-INF/NOTICE.txt'
        exclude 'META-INF/notice.txt'
        exclude 'META-INF/ASL2.0'
        exclude("META-INF/*.kotlin_module")
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])

    implementation 'androidx.appcompat:appcompat:1.1.0'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
    implementation project(path: ':libstreaming')
    implementation 'com.google.android.material:material:1.0.0'
    implementation("com.squareup.okhttp3:okhttp:4.6.0")
    // Fix: the 'compile' configuration is deprecated (removed in Gradle 7+);
    // 'implementation' is the equivalent modern configuration.
    implementation 'com.squareup.mimecraft:mimecraft:1.1.1'
}
1 +# Add project specific ProGuard rules here.
2 +# You can control the set of applied configuration files using the
3 +# proguardFiles setting in build.gradle.
4 +#
5 +# For more details, see
6 +# http://developer.android.com/guide/developing/tools/proguard.html
7 +
8 +# If your project uses WebView with JS, uncomment the following
9 +# and specify the fully qualified class name to the JavaScript interface
10 +# class:
11 +#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 +# public *;
13 +#}
14 +
15 +# Uncomment this to preserve the line number information for
16 +# debugging stack traces.
17 +#-keepattributes SourceFile,LineNumberTable
18 +
19 +# If you keep the line number information, uncomment this to
20 +# hide the original source file name.
21 +#-renamesourcefileattribute SourceFile
1 +package com.example.cctedv;
2 +
3 +import android.content.Context;
4 +
5 +import androidx.test.platform.app.InstrumentationRegistry;
6 +import androidx.test.ext.junit.runners.AndroidJUnit4;
7 +
8 +import org.junit.Test;
9 +import org.junit.runner.RunWith;
10 +
11 +import static org.junit.Assert.*;
12 +
13 +/**
14 + * Instrumented test, which will execute on an Android device.
15 + *
16 + * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
17 + */
18 +@RunWith(AndroidJUnit4.class)
19 +public class ExampleInstrumentedTest {
20 + @Test
21 + public void useAppContext() {
22 + // Context of the app under test.
23 + Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
24 +
25 + assertEquals("com.example.cctedv", appContext.getPackageName());
26 + }
27 +}
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.cctedv"
    android:installLocation="auto" >

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:usesCleartextTraffic="true"
        android:theme="@style/AppTheme">
        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
        <activity android:name=".RecordActivity" />
        <activity android:name=".SetUserImgActivity" />
    </application>

    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <!-- Fix: android:required is not a valid attribute on <uses-permission>
         (it belongs on <uses-feature>); the platform silently ignored it. -->
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.CAMERA" />

</manifest>
...\ No newline at end of file ...\ No newline at end of file
1 +package com.example.cctedv;
2 +
3 +import android.graphics.Bitmap;
4 +
5 +public class ImgItem {
6 + private String mFilename;
7 + private Bitmap selectedImage;
8 + public ImgItem() {
9 + mFilename = "";
10 + }
11 + public String getmFilename() {
12 + return mFilename;
13 + }
14 + public void setmFilename(String name) {
15 + this.mFilename = name;
16 + }
17 + public Bitmap getSelectedImage(){return selectedImage;}
18 + public void setSelectedImage(Bitmap bm) {this.selectedImage = bm;}
19 +}
1 +package com.example.cctedv;
2 +
3 +import android.content.Context;
4 +import android.util.Log;
5 +import android.view.LayoutInflater;
6 +import android.view.View;
7 +import android.view.ViewGroup;
8 +import android.widget.BaseAdapter;
9 +import android.widget.ImageView;
10 +import android.widget.TextView;
11 +
12 +import java.util.ArrayList;
13 +
14 +public class ImgListAdapter extends BaseAdapter {
15 + private ArrayList<ImgItem> mImgList;
16 + private LayoutInflater mInflater;
17 +
18 + public ImgListAdapter(ArrayList<ImgItem> mImgList) {
19 + this.mImgList = mImgList;
20 + }
21 + @Override
22 + public int getCount() {
23 + return mImgList.size();
24 + }
25 +
26 + @Override
27 + public View getView(int position, View convertView, ViewGroup parent) {
28 +
29 + final Context context = parent.getContext();
30 +
31 + if (convertView == null) {
32 + if (mInflater == null) {
33 + mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
34 + }
35 + convertView=LayoutInflater.from(context).inflate(R.layout.listview_img,null);
36 + }
37 +
38 + ImageView imgView = convertView.findViewById(R.id.img_source);
39 + TextView fileName = convertView.findViewById(R.id.list_file_name);
40 +
41 + ImgItem file = mImgList.get(position);
42 + Log.i("ITEM : ", file.getmFilename());
43 + fileName.setText(file.getmFilename());
44 + imgView.setImageBitmap(file.getSelectedImage());
45 +
46 + convertView.setTag("" + position);
47 + return convertView;
48 + }
49 +
50 + @Override
51 + public long getItemId(int position) {
52 + return position;
53 + }
54 +
55 + @Override
56 + public Object getItem(int position) {
57 + return mImgList.get(position);
58 + }
59 +
60 +
61 +}
1 +package com.example.cctedv;
2 +
3 +import androidx.appcompat.app.AppCompatActivity;
4 +import androidx.core.app.ActivityCompat;
5 +import androidx.core.content.ContextCompat;
6 +
7 +import android.Manifest;
8 +import android.content.Intent;
9 +import android.content.pm.PackageManager;
10 +import android.os.AsyncTask;
11 +import android.os.Bundle;
12 +import android.util.Log;
13 +import android.view.View;
14 +import android.widget.Button;
15 +import android.widget.EditText;
16 +import android.widget.Toast;
17 +
18 +
19 +import java.util.ArrayList;
20 +
21 +public class MainActivity extends AppCompatActivity {
22 + /*
23 + * 데이터 수집용 페이지의 메인 액티비티 입니다.
24 + * 이 화면에서 사진 등록 페이지로 이동하거나, 사용자 등록 후 데이터 수집을 할 수 있습니다.
25 + * */
26 + private static final int CAMERA_PERMISSION = 1;
27 + private static final int REQ_RECORDING_PERMISSION = 1;
28 +
29 + @Override
30 + protected void onCreate(Bundle savedInstanceState) {
31 + super.onCreate(savedInstanceState);
32 + setContentView(R.layout.activity_main);
33 + grantPermissions();
34 + Button mButton;
35 + Button imgActivity;
36 + final EditText mEdit;
37 +
38 + Button fab = findViewById(R.id.fab);
39 + fab.setOnClickListener(new View.OnClickListener() {
40 + @Override
41 + public void onClick(View view) {
42 + Log.i("MainActivity","화면 전환");
43 + Intent intent = new Intent(MainActivity.this, RecordActivity.class);
44 + startActivity(intent);
45 + }
46 + });
47 +
48 + mEdit = (EditText)findViewById(R.id.userId);
49 + mButton = (Button)findViewById(R.id.enroll_user);
50 + mButton.setOnClickListener(
51 + new View.OnClickListener()
52 + {
53 + public void onClick(View view)
54 + {
55 + Singleton.getInstance().setUserId(mEdit.getText().toString());
56 + String url = "http://victoria.khunet.net:5900/user";
57 + final AsyncTask<Void, Void, String> execute = new NetworkTask(url, Singleton.getInstance().getUserId()).execute();
58 +
59 + Log.v("UserId", Singleton.getInstance().getUserId());
60 + }
61 + });
62 +
63 + imgActivity = (Button)findViewById(R.id.img_activity);
64 + imgActivity.setOnClickListener(new View.OnClickListener()
65 + {
66 + public void onClick(View view) {
67 + Intent intent = new Intent(MainActivity.this, SetUserImgActivity.class);
68 + startActivity(intent);
69 + }
70 + });
71 +
72 + }
73 +
74 + public boolean grantPermissions() {
75 + ArrayList<String> permissions_array = new ArrayList<>();
76 + if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
77 + permissions_array.add(Manifest.permission.WRITE_EXTERNAL_STORAGE);
78 + permissions_array.add(Manifest.permission.READ_EXTERNAL_STORAGE);
79 + }
80 + if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
81 + permissions_array.add(Manifest.permission.RECORD_AUDIO);
82 + }
83 + if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
84 + permissions_array.add(Manifest.permission.CAMERA);
85 + }
86 + if(!permissions_array.isEmpty()) {
87 + String[] permissions = new String[permissions_array.size()];
88 + permissions_array.toArray(permissions);
89 + //Callback으로 onRequestPermissionsResult 함수가 실행됨
90 + ActivityCompat.requestPermissions(this, permissions, REQ_RECORDING_PERMISSION);
91 + ActivityCompat.requestPermissions(this, permissions, CAMERA_PERMISSION);
92 + return false;
93 + } else {
94 + return true;
95 + }
96 + }
97 +
98 + @Override
99 + public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
100 + super.onRequestPermissionsResult(requestCode, permissions, grantResults);
101 + if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED){
102 + Log.v(" ","Permission: " + permissions[0] + "was " + grantResults[0]);
103 + if (requestCode == REQ_RECORDING_PERMISSION) {
104 + for (int i = 0; i < grantResults.length ; i++) {
105 + if (grantResults[i] < 0) {
106 + Toast.makeText(MainActivity.this, "해당 권한을 활성화하셔야 합니다.",Toast.LENGTH_SHORT).show();
107 + return;
108 + }
109 + }
110 + Log.v("info : ","mic permission");
111 + } else if (requestCode == CAMERA_PERMISSION) {
112 + for (int i = 0; i < grantResults.length ; i++) {
113 + if (grantResults[i] < 0) {
114 + Toast.makeText(MainActivity.this, "해당 권한을 활성화하셔야 합니다.",Toast.LENGTH_SHORT).show();
115 + return;
116 + }
117 + }
118 + Log.v("info : ","mic permission");
119 + }
120 + }
121 + }
122 +
123 +}
124 +
1 +package com.example.cctedv;
2 +
3 +import android.os.AsyncTask;
4 +import android.util.Log;
5 +
6 +import java.io.File;
7 +import java.io.IOException;
8 +import okhttp3.MultipartBody;
9 +import okhttp3.OkHttpClient;
10 +import okhttp3.Request;
11 +import okhttp3.RequestBody;
12 +import okhttp3.Response;
13 +
14 +public class NetworkTask extends AsyncTask<Void, Void, String> {
15 + /*
16 + * 이 NetworkTask Class를 기반으로 http 통신을 이용하여 api를 호출 할 수 있습니다.
17 + * 이 NetworkTask Class는 사용자 프레임데이터를 송신하거나, 사용자 아이디를 등록할 때 사용됩니다.
18 + * */
19 + private String url;
20 + private String data;
21 + private File mFiles = null;
22 + private String mDate = null;
23 +
24 + public NetworkTask(String url, String data) {
25 + this.url = url;
26 + this.data = data;
27 + }
28 + public NetworkTask(String url, String data, File mFiles, String mDate) {
29 + this.url = url;
30 + this.data = data;
31 + this.mFiles = mFiles;
32 + this.mDate = mDate;
33 + }
34 +
35 + @Override
36 + protected String doInBackground(Void... params) {
37 +
38 + if(mFiles == null) {
39 + RequestBody requestBody = new MultipartBody.Builder().setType(MultipartBody.FORM)
40 + .addFormDataPart("userId", data)
41 + .addFormDataPart("userToken", "")
42 + .build();
43 +
44 + OkHttpClient client = new OkHttpClient();
45 + Request request = new Request.Builder().url(url).post(requestBody).build();
46 + Response response = null;
47 + try {
48 + response = client.newCall(request).execute();
49 + } catch (IOException e) {
50 + e.printStackTrace();
51 + }
52 + if (response != null)
53 + Log.i("RES", response.toString());
54 + } else {
55 + Log.i("DATA SIZE ", String.valueOf(this.data.length()));
56 + RequestBody requestBody = new MultipartBody.Builder().setType(MultipartBody.FORM)
57 + .addFormDataPart("befEncoding", this.data)
58 + .addFormDataPart("userId", Singleton.getInstance().getUserId())
59 + .addFormDataPart("timeStamp", this.mDate)
60 + .build();
61 +
62 +
63 + OkHttpClient client = new OkHttpClient();
64 + Request request = new Request.Builder().url(url).post(requestBody).build();
65 + Response response = null;
66 + try {
67 + response = client.newCall(request).execute();
68 + } catch (IOException e) {
69 + e.printStackTrace();
70 + }
71 + if (response != null)
72 + Log.i("RES", response.toString());
73 + }
74 + return "hello";
75 + }
76 +
77 + @Override
78 + protected void onPostExecute(String s) {
79 + super.onPostExecute(s);
80 + if(s != null)
81 + Log.i("RESPONSE : ", s);
82 + }
83 +
84 + @Override
85 + protected void onPreExecute() {
86 +
87 + }
88 +}
1 +package com.example.cctedv;
2 +
3 +import android.app.Activity;
4 +import android.graphics.Bitmap;
5 +import android.graphics.BitmapFactory;
6 +import android.graphics.Matrix;
7 +import android.graphics.SurfaceTexture;
8 +import android.hardware.Camera;
9 +import android.os.Build;
10 +import android.os.Bundle;
11 +import android.os.Environment;
12 +import android.util.Log;
13 +import android.view.Gravity;
14 +import android.view.TextureView;
15 +import android.widget.FrameLayout;
16 +
17 +import java.io.ByteArrayOutputStream;
18 +import java.io.File;
19 +import java.io.FileNotFoundException;
20 +import java.io.FileOutputStream;
21 +import java.io.IOException;
22 +import java.text.DateFormat;
23 +import java.text.SimpleDateFormat;
24 +import java.util.Base64;
25 +import java.util.Date;
26 +import java.util.TimeZone;
27 +
28 +import androidx.annotation.RequiresApi;
29 +
30 +public class RecordActivity extends Activity implements TextureView.SurfaceTextureListener {
31 + private Camera mCamera;
32 + private TextureView mTextureView;
33 +
34 + private String mOutputFile; // 파일
35 + private DateFormat mDateFormat;
36 + private String mDate;
37 + private String mUserName = "victoria";
38 + public File directory;
39 + private FileOutputStream mFileOutputStream;
40 + private File mFiles;
41 + private boolean isCameraOpen = false;
42 +
43 + private int mUnitTime = 2000;
44 + private int mRemainingFileSize;
45 + /*
46 + * 이 RecordActivity는 사용자별 데이터 프레임을 일정 unitTime별로 서버에 전송합니다.
47 + * */
48 +
49 + @Override
50 + protected void onCreate(Bundle savedInstanceState) {
51 + super.onCreate(savedInstanceState);
52 + setContentView(R.layout.activity_record);
53 +
54 + mTextureView = new TextureView(this);
55 + mTextureView.setSurfaceTextureListener(this);
56 +
57 +
58 + setContentView(mTextureView);
59 + }
60 +
61 +
62 + public void settingVideoInfo() {
63 + TimeZone mTimeZone = TimeZone.getDefault();
64 + mDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
65 + mDateFormat.setTimeZone(mTimeZone);
66 + mDate = mDateFormat.format(new Date());
67 + mOutputFile = Environment.getExternalStorageDirectory().getAbsolutePath() + "/CCTedV" + "/" + mUserName + "_" + mDate;
68 + Log.i("PATH :: ", mOutputFile);
69 + mFiles = new File(mOutputFile);
70 +
71 + mRemainingFileSize = calculateGap(mDate)*44100*2;
72 + try {
73 + mFileOutputStream = new FileOutputStream(mOutputFile);
74 + } catch (FileNotFoundException e) {
75 + e.printStackTrace();
76 + }
77 + }
78 +
79 + public void makeDir(){
80 + directory = new File(Environment.getExternalStorageDirectory() + File.separator + "CCTedV");
81 + boolean success = true;
82 + if (!directory.exists()) {
83 + success = directory.mkdirs();
84 + }
85 + if (success) {
86 + Log.v("FILE", "Directory is exist");
87 + } else {
88 + Log.e("FILE", "Directory not created");
89 + }
90 + }
91 +
92 + @Override
93 + public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
94 + Log.i("hello", "aaa");
95 + makeDir();
96 + settingVideoInfo();
97 +
98 + isCameraOpen = true;
99 + mCamera = Camera.open();
100 +
101 + Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
102 + mTextureView.setLayoutParams(new FrameLayout.LayoutParams(
103 + previewSize.width, previewSize.height, Gravity.CENTER));
104 +
105 + try {
106 + mCamera.setPreviewTexture(surface);
107 + } catch (IOException t) {
108 + }
109 +
110 + mCamera.startPreview();
111 +
112 + mCamera.setPreviewCallback(new Camera.PreviewCallback() {
113 +
114 + @RequiresApi(api = Build.VERSION_CODES.O)
115 + public void onPreviewFrame(final byte[] data, final Camera camera) {
116 + if(isCameraOpen) {
117 + if(!accumulateFile(data)) {
118 + if (mFiles.exists()) {
119 + try {
120 + //파일 저장
121 + mFileOutputStream.close();
122 + mFileOutputStream.flush();
123 + mFileOutputStream = null;
124 + mFiles.delete();
125 + // URL 설정.
126 + String url = "http://victoria.khunet.net:5900/upload";
127 +
128 + mDate = mDateFormat.format(new Date());
129 + File photo=new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/CCTedV" + "/" + "img" + "_" + mDate+".jpeg");
130 + FileOutputStream fos = new FileOutputStream(photo);
131 + Bitmap bmp = mTextureView.getBitmap();
132 + ByteArrayOutputStream stream = new ByteArrayOutputStream();
133 + bmp.compress(Bitmap.CompressFormat.JPEG, 100, stream);
134 + byte[] currentData = stream.toByteArray();
135 +
136 +// fos = new FileOutputStream(photo);
137 +// fos.write(currentData);
138 +// fos.flush();
139 +// fos.close();
140 +
141 + String s = Base64.getEncoder().encodeToString(currentData);
142 +
143 + // AsyncTask를 통해 HttpURLConnection 수행.
144 + (new NetworkTask(url, s, mFiles, mDate)).execute();
145 + photo.delete();
146 +
147 +
148 + } catch (IOException e) {
149 + e.printStackTrace();
150 + }
151 +
152 + //다시 시작
153 + settingVideoInfo();
154 + }
155 + }
156 + }
157 + // Process the contents of byte for whatever you need
158 + }
159 + });
160 + }
161 +
162 + @Override
163 + public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
164 +
165 + }
166 +
167 + @Override
168 + public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
169 + isCameraOpen = false;
170 + try {
171 + mFileOutputStream.close();
172 + mFileOutputStream = null;
173 + } catch (IOException e) {
174 + e.printStackTrace();
175 + }
176 + mCamera.stopPreview();
177 + mCamera.release();
178 + return true;
179 + }
180 +
181 + private boolean accumulateFile(byte[] byteBuffer) {
182 + try {
183 + if(mRemainingFileSize >0) {
184 + if(byteBuffer != null) {
185 + mFileOutputStream.write(byteBuffer);
186 +
187 + mRemainingFileSize -= byteBuffer.length * 2;
188 + return true;
189 + } else {
190 +// Toast.makeText(RecorderService.this, "Playing...", Toast.LENGTH_SHORT).show();
191 + }
192 + }
193 + else {
194 + mFileOutputStream.close();
195 + }
196 + }catch (IOException e){
197 + Log.e("file out" , e.toString());
198 + }
199 + return false;
200 + }
201 +
202 + private int calculateGap(String date) {
203 + String time = date.substring(11, date.length());
204 + final int hour = Integer.parseInt(time.substring(0, 2));
205 + final int min = Integer.parseInt(time.substring(3, 5));
206 + final int sec = Integer.parseInt(time.substring(6, time.length()));
207 + final int timeInSecond = hour * 3600 + min * 60 + sec;
208 + final int gap = mUnitTime - (timeInSecond % mUnitTime);
209 + return gap;
210 + }
211 +
212 + @Override
213 + public void onSurfaceTextureUpdated(SurfaceTexture surface) {
214 +
215 + }
216 +}
1 +package com.example.cctedv;
2 +
3 +import android.content.DialogInterface;
4 +import android.content.Intent;
5 +import android.database.Cursor;
6 +import android.graphics.Bitmap;
7 +import android.graphics.BitmapFactory;
8 +import android.net.Uri;
9 +import android.os.Bundle;
10 +import android.provider.MediaStore;
11 +import android.util.Log;
12 +import android.view.LayoutInflater;
13 +import android.view.View;
14 +import android.widget.Button;
15 +import android.widget.EditText;
16 +import android.widget.ListView;
17 +import android.widget.Toast;
18 +
19 +import java.io.FileNotFoundException;
20 +import java.io.InputStream;
21 +import java.util.ArrayList;
22 +
23 +import androidx.appcompat.app.AlertDialog;
24 +import androidx.appcompat.app.AppCompatActivity;
25 +
26 +public class SetUserImgActivity extends AppCompatActivity {
27 + private ListView mListView;
28 + private ImgListAdapter mAdapter;
29 + private ArrayList<ImgItem> mImgList = null;
30 + private Button mImgAddButton;
31 + private Button uploadButton;
32 + private static int RESULT_LOAD_IMG = 1;
33 +
34 +
35 + InputStream imageStream;
36 + EditText mItemName;
37 +
38 + @Override
39 + protected void onCreate(Bundle savedInstanceState) {
40 + super.onCreate(savedInstanceState);
41 + setContentView(R.layout.activity_img);
42 +
43 + mListView = findViewById(R.id.list_view);
44 + mImgAddButton = (Button)findViewById(R.id.img_upload);
45 + mImgList = new ArrayList<>();
46 + ImgItem t = new ImgItem();
47 +// t.setmFilename("");
48 + mImgList.add(t);
49 +
50 + mListView.setAdapter(mAdapter);
51 +
52 + mImgAddButton.setOnClickListener(new View.OnClickListener()
53 + {
54 + public void onClick(View view) {
55 + openBuilder();
56 + }
57 + });
58 +
59 + }
60 +// @Override
61 +// protected void onActivityResult(int reqCode, int resultCode, Intent data) {
62 +// super.onActivityResult(reqCode, resultCode, data);
63 +// if (resultCode == RESULT_OK) {
64 +// try {
65 +// final Uri imageUri = data.getData();
66 +// imageStream = getContentResolver().openInputStream(imageUri);
67 +//
68 +// } catch (FileNotFoundException e) {
69 +// e.printStackTrace();
70 +//// Toast.makeText(PostImage.this, "Something went wrong", Toast.LENGTH_LONG).show();
71 +// }
72 +//
73 +// }else {
74 +//// Toast.makeText(PostImage.this, "You haven't picked Image",Toast.LENGTH_LONG).show();
75 +// }
76 +// }
77 +
78 + public void openBuilder() {
79 + AlertDialog.Builder builder = new AlertDialog.Builder(this);
80 + // Get the layout inflater
81 + LayoutInflater inflater = this.getLayoutInflater();
82 + View view = inflater.inflate(R.layout.dialog_img_upload, null);
83 +
84 +// AlertDialog alert = builder.create();
85 +// mItemName = (EditText) view.findViewById(R.id.filename);
86 +
87 +// uploadButton = (Button) view.findViewById(R.id.select_img);
88 +// uploadButton.setOnClickListener(new View.OnClickListener() {
89 +// @Override
90 +// public void onClick(View v) {
91 +// Log.i("??","!");
92 +// Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
93 +// photoPickerIntent.setType("image/*");
94 +// startActivityForResult(photoPickerIntent, RESULT_LOAD_IMG);
95 +// }
96 +// });
97 +
98 + builder.setView(inflater.inflate(R.layout.dialog_img_upload, null))
99 + // Add action buttons
100 + .setNegativeButton("UPLOAD", new DialogInterface.OnClickListener() {
101 + public void onClick(DialogInterface dialog, int id) {
102 +// Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
103 +// photoPickerIntent.setType("image/*");
104 +// startActivityForResult(photoPickerIntent, RESULT_LOAD_IMG);
105 + Intent i = new Intent(
106 + Intent.ACTION_PICK,
107 + android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
108 +
109 + startActivityForResult(i, RESULT_LOAD_IMG);
110 + }
111 + });
112 +
113 +
114 +// builder.setView(inflater.inflate(R.layout.dialog_img_upload, null))
115 +// // Add action buttons
116 +// .setPositiveButton("OK", new DialogInterface.OnClickListener() {
117 +// public void onClick(DialogInterface dialog, int id) {
118 +// ImgItem t = new ImgItem();
119 +//// t.setmFilename(mItemName.getText().toString());
120 +//// Log.i("name : ", mItemName.getText().toString());
121 +// final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
122 +// t.setSelectedImage(selectedImage);
123 +// Log.i("?", selectedImage.toString());
124 +// mImgList.add(t);
125 +// mAdapter = new ImgListAdapter(mImgList);
126 +//
127 +// mListView.setAdapter(mAdapter);
128 +// }
129 +// });
130 +
131 +
132 + builder.show();
133 +
134 + }
135 +
136 + @Override
137 + protected void onActivityResult(int requestCode, int resultCode, Intent data) {
138 + super.onActivityResult(requestCode, resultCode, data);
139 +
140 + if (requestCode == RESULT_LOAD_IMG && resultCode == RESULT_OK && null != data) {
141 + Uri selectedImage = data.getData();
142 + String[] filePathColumn = { MediaStore.Images.Media.DATA };
143 +
144 + Cursor cursor = getContentResolver().query(selectedImage,
145 + filePathColumn, null, null, null);
146 + cursor.moveToFirst();
147 +
148 + int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
149 + String picturePath = cursor.getString(columnIndex);
150 + cursor.close();
151 +
152 + ImgItem t = new ImgItem();
153 +// t.setmFilename(mItemName.getText().toString());
154 +// Log.i("name : ", mItemName.getText().toString());
155 + Bitmap selectedImg = BitmapFactory.decodeFile(picturePath);
156 + t.setSelectedImage(selectedImg);
157 + mImgList.add(t);
158 + mAdapter = new ImgListAdapter(mImgList);
159 +
160 + mListView.setAdapter(mAdapter);
161 +
162 +// ImageView imageView = (ImageView) findViewById(R.id.imgView);
163 +// imageView.setImageBitmap(BitmapFactory.decodeFile(picturePath));
164 +
165 + }
166 +
167 +
168 + }
169 +
170 + @Override
171 + public void onResume() {
172 + super.onResume();
173 + mAdapter = new ImgListAdapter(mImgList);
174 + Log.d("size", String.valueOf(mImgList.size()));
175 + mListView.setAdapter(mAdapter);
176 + }
177 +}
1 +package com.example.cctedv;
2 +
3 +public class Singleton {
4 + private String userId;
5 + public String getUserId()
6 + {
7 + return userId;
8 + }
9 + public void setUserId(String data)
10 + {
11 + this.userId = data;
12 + }
13 + private static Singleton instance = null;
14 + /*
15 + * 이 Singleton은 애플리케이션이 시작될 때 어떤 클래스가 최초 한번만 메모리를 할당하고(Static) 그 메모리에 인스턴스를 만들어 사용하는 클래스입니다.
16 + * 유저 정보를 저장하고 접근할 때, 사용됩니다.
17 + * */
18 + public static synchronized Singleton getInstance(){
19 + if(null == instance){
20 + instance = new Singleton();
21 + }
22 + return instance;
23 + }
24 +}
1 +<vector android:height="24dp" android:tint="#FFFFFF"
2 + android:viewportHeight="24.0" android:viewportWidth="24.0"
3 + android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
4 + <path android:fillColor="#010101" android:pathData="M12,12m-8,0a8,8 0,1 1,16 0a8,8 0,1 1,-16 0"/>
5 +</vector>
1 +<vector xmlns:android="http://schemas.android.com/apk/res/android"
2 + xmlns:aapt="http://schemas.android.com/aapt"
3 + android:width="108dp"
4 + android:height="108dp"
5 + android:viewportWidth="108"
6 + android:viewportHeight="108">
7 + <path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
8 + <aapt:attr name="android:fillColor">
9 + <gradient
10 + android:endX="85.84757"
11 + android:endY="92.4963"
12 + android:startX="42.9492"
13 + android:startY="49.59793"
14 + android:type="linear">
15 + <item
16 + android:color="#44000000"
17 + android:offset="0.0" />
18 + <item
19 + android:color="#00000000"
20 + android:offset="1.0" />
21 + </gradient>
22 + </aapt:attr>
23 + </path>
24 + <path
25 + android:fillColor="#FFFFFF"
26 + android:fillType="nonZero"
27 + android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
28 + android:strokeWidth="1"
29 + android:strokeColor="#00000000" />
30 +</vector>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<vector xmlns:android="http://schemas.android.com/apk/res/android"
3 + android:width="108dp"
4 + android:height="108dp"
5 + android:viewportWidth="108"
6 + android:viewportHeight="108">
7 + <path
8 + android:fillColor="#3DDC84"
9 + android:pathData="M0,0h108v108h-108z" />
10 + <path
11 + android:fillColor="#00000000"
12 + android:pathData="M9,0L9,108"
13 + android:strokeWidth="0.8"
14 + android:strokeColor="#33FFFFFF" />
15 + <path
16 + android:fillColor="#00000000"
17 + android:pathData="M19,0L19,108"
18 + android:strokeWidth="0.8"
19 + android:strokeColor="#33FFFFFF" />
20 + <path
21 + android:fillColor="#00000000"
22 + android:pathData="M29,0L29,108"
23 + android:strokeWidth="0.8"
24 + android:strokeColor="#33FFFFFF" />
25 + <path
26 + android:fillColor="#00000000"
27 + android:pathData="M39,0L39,108"
28 + android:strokeWidth="0.8"
29 + android:strokeColor="#33FFFFFF" />
30 + <path
31 + android:fillColor="#00000000"
32 + android:pathData="M49,0L49,108"
33 + android:strokeWidth="0.8"
34 + android:strokeColor="#33FFFFFF" />
35 + <path
36 + android:fillColor="#00000000"
37 + android:pathData="M59,0L59,108"
38 + android:strokeWidth="0.8"
39 + android:strokeColor="#33FFFFFF" />
40 + <path
41 + android:fillColor="#00000000"
42 + android:pathData="M69,0L69,108"
43 + android:strokeWidth="0.8"
44 + android:strokeColor="#33FFFFFF" />
45 + <path
46 + android:fillColor="#00000000"
47 + android:pathData="M79,0L79,108"
48 + android:strokeWidth="0.8"
49 + android:strokeColor="#33FFFFFF" />
50 + <path
51 + android:fillColor="#00000000"
52 + android:pathData="M89,0L89,108"
53 + android:strokeWidth="0.8"
54 + android:strokeColor="#33FFFFFF" />
55 + <path
56 + android:fillColor="#00000000"
57 + android:pathData="M99,0L99,108"
58 + android:strokeWidth="0.8"
59 + android:strokeColor="#33FFFFFF" />
60 + <path
61 + android:fillColor="#00000000"
62 + android:pathData="M0,9L108,9"
63 + android:strokeWidth="0.8"
64 + android:strokeColor="#33FFFFFF" />
65 + <path
66 + android:fillColor="#00000000"
67 + android:pathData="M0,19L108,19"
68 + android:strokeWidth="0.8"
69 + android:strokeColor="#33FFFFFF" />
70 + <path
71 + android:fillColor="#00000000"
72 + android:pathData="M0,29L108,29"
73 + android:strokeWidth="0.8"
74 + android:strokeColor="#33FFFFFF" />
75 + <path
76 + android:fillColor="#00000000"
77 + android:pathData="M0,39L108,39"
78 + android:strokeWidth="0.8"
79 + android:strokeColor="#33FFFFFF" />
80 + <path
81 + android:fillColor="#00000000"
82 + android:pathData="M0,49L108,49"
83 + android:strokeWidth="0.8"
84 + android:strokeColor="#33FFFFFF" />
85 + <path
86 + android:fillColor="#00000000"
87 + android:pathData="M0,59L108,59"
88 + android:strokeWidth="0.8"
89 + android:strokeColor="#33FFFFFF" />
90 + <path
91 + android:fillColor="#00000000"
92 + android:pathData="M0,69L108,69"
93 + android:strokeWidth="0.8"
94 + android:strokeColor="#33FFFFFF" />
95 + <path
96 + android:fillColor="#00000000"
97 + android:pathData="M0,79L108,79"
98 + android:strokeWidth="0.8"
99 + android:strokeColor="#33FFFFFF" />
100 + <path
101 + android:fillColor="#00000000"
102 + android:pathData="M0,89L108,89"
103 + android:strokeWidth="0.8"
104 + android:strokeColor="#33FFFFFF" />
105 + <path
106 + android:fillColor="#00000000"
107 + android:pathData="M0,99L108,99"
108 + android:strokeWidth="0.8"
109 + android:strokeColor="#33FFFFFF" />
110 + <path
111 + android:fillColor="#00000000"
112 + android:pathData="M19,29L89,29"
113 + android:strokeWidth="0.8"
114 + android:strokeColor="#33FFFFFF" />
115 + <path
116 + android:fillColor="#00000000"
117 + android:pathData="M19,39L89,39"
118 + android:strokeWidth="0.8"
119 + android:strokeColor="#33FFFFFF" />
120 + <path
121 + android:fillColor="#00000000"
122 + android:pathData="M19,49L89,49"
123 + android:strokeWidth="0.8"
124 + android:strokeColor="#33FFFFFF" />
125 + <path
126 + android:fillColor="#00000000"
127 + android:pathData="M19,59L89,59"
128 + android:strokeWidth="0.8"
129 + android:strokeColor="#33FFFFFF" />
130 + <path
131 + android:fillColor="#00000000"
132 + android:pathData="M19,69L89,69"
133 + android:strokeWidth="0.8"
134 + android:strokeColor="#33FFFFFF" />
135 + <path
136 + android:fillColor="#00000000"
137 + android:pathData="M19,79L89,79"
138 + android:strokeWidth="0.8"
139 + android:strokeColor="#33FFFFFF" />
140 + <path
141 + android:fillColor="#00000000"
142 + android:pathData="M29,19L29,89"
143 + android:strokeWidth="0.8"
144 + android:strokeColor="#33FFFFFF" />
145 + <path
146 + android:fillColor="#00000000"
147 + android:pathData="M39,19L39,89"
148 + android:strokeWidth="0.8"
149 + android:strokeColor="#33FFFFFF" />
150 + <path
151 + android:fillColor="#00000000"
152 + android:pathData="M49,19L49,89"
153 + android:strokeWidth="0.8"
154 + android:strokeColor="#33FFFFFF" />
155 + <path
156 + android:fillColor="#00000000"
157 + android:pathData="M59,19L59,89"
158 + android:strokeWidth="0.8"
159 + android:strokeColor="#33FFFFFF" />
160 + <path
161 + android:fillColor="#00000000"
162 + android:pathData="M69,19L69,89"
163 + android:strokeWidth="0.8"
164 + android:strokeColor="#33FFFFFF" />
165 + <path
166 + android:fillColor="#00000000"
167 + android:pathData="M79,19L79,89"
168 + android:strokeWidth="0.8"
169 + android:strokeColor="#33FFFFFF" />
170 +</vector>
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + android:layout_width="match_parent"
4 + android:layout_height="match_parent"
5 + xmlns:tools="http://schemas.android.com/tools">
6 + <LinearLayout
7 + android:id="@+id/rl_down"
8 + android:layout_width="match_parent"
9 + android:layout_height="match_parent"
10 + tools:context=".SetUserImgActivity"
11 + android:orientation="vertical"
12 + android:background="@drawable/pic_back02"
13 + tools:ignore="MissingConstraints">
14 + <Button
15 + android:id="@+id/img_upload"
16 + android:layout_width="137dp"
17 + android:layout_height="137dp"
18 + android:layout_marginTop="40dp"
19 + android:layout_gravity="center"
20 + android:background="@drawable/btn_upload"
21 + />
22 + <ListView
23 + android:id="@+id/list_view"
24 + android:layout_width="match_parent"
25 + android:layout_height="match_parent"/>
26 + </LinearLayout>
27 +
28 +</androidx.constraintlayout.widget.ConstraintLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + xmlns:app="http://schemas.android.com/apk/res-auto"
4 + xmlns:tools="http://schemas.android.com/tools"
5 + android:layout_width="match_parent"
6 + android:layout_height="match_parent"
7 + tools:context=".MainActivity">
8 +
9 + <LinearLayout
10 + android:id="@+id/rl"
11 + android:layout_width="match_parent"
12 + android:layout_height="560dp"
13 + android:background="@drawable/pic_back"
14 + android:orientation="vertical"
15 + app:layout_constraintStart_toStartOf="parent"
16 + app:layout_constraintTop_toTopOf="parent"
17 + tools:context=".MainActivity">
18 +
19 + <LinearLayout
20 + android:id="@+id/rl_up"
21 + android:layout_width="match_parent"
22 + android:layout_height="300dp"
23 + android:orientation="vertical"
24 + tools:context=".MainActivity">
25 +
        <TextView
            android:layout_width="315dp"
            android:layout_height="wrap_content"
            android:layout_gravity="center"
            android:layout_marginTop="45dp"
            android:fontFamily="@font/nanumsquarer"
            android:gravity="left"
            android:text="User ID Setting"
            android:textColor="#FFFFFF"
            android:textSize="24sp"
            />
37 +
38 + <EditText
39 + android:id="@+id/userId"
40 + android:layout_width="315dp"
41 + android:layout_height="56dp"
42 + android:layout_gravity="center"
43 + android:layout_marginTop="45dp"
44 + android:backgroundTint = "#FFFFFF"/>
45 +
46 + <Button
47 + android:id="@+id/enroll_user"
48 + android:layout_width="315dp"
49 + android:layout_height="56dp"
50 + android:layout_gravity="center"
51 + android:layout_marginTop="45dp"
52 + android:background="@drawable/btn_enroll" />
53 + </LinearLayout>
54 +
55 + <LinearLayout
56 + android:id="@+id/rl_down"
57 + android:layout_width="match_parent"
58 + android:layout_height="260dp"
59 + android:orientation="vertical"
60 + tools:context=".MainActivity">
61 +
            <TextView
                android:layout_width="315dp"
                android:layout_height="wrap_content"
                android:layout_gravity="center"
                android:layout_marginTop="20dp"
                android:fontFamily="@font/nanumsquarer"
                android:gravity="center"
                android:text="Recording"
                android:textColor="#FFFFFF"
                android:textSize="30sp" />
72 +
73 + <LinearLayout
74 + android:layout_width="match_parent"
75 + android:layout_height="0dp"
76 + android:layout_weight="1"
77 + android:gravity="bottom"
78 + android:paddingTop="10dp"
79 + android:layout_marginTop="23dp"
80 + android:orientation="horizontal">
81 +
82 + <androidx.constraintlayout.widget.ConstraintLayout
83 + android:layout_width="0dp"
84 + android:layout_height="match_parent"
85 + android:layout_weight="1"
86 + android:gravity="center"
87 + android:orientation="vertical">
88 + </androidx.constraintlayout.widget.ConstraintLayout>
89 + <androidx.constraintlayout.widget.ConstraintLayout
90 + android:layout_width="0dp"
91 + android:layout_height="match_parent"
92 + android:layout_weight="2"
93 + android:gravity="center"
94 + android:orientation="vertical">
95 + <Button
96 + android:id="@+id/fab"
97 + android:layout_width="137dp"
98 + android:layout_height="137dp"
99 + android:layout_gravity="center"
100 + android:background="@drawable/btn_cam"
101 + app:layout_constraintEnd_toEndOf="parent"
102 + app:layout_constraintStart_toStartOf="parent"
103 + tools:layout_editor_absoluteY="0dp" />
104 + </androidx.constraintlayout.widget.ConstraintLayout>
105 + <androidx.constraintlayout.widget.ConstraintLayout
106 + android:layout_width="0dp"
107 + android:layout_height="match_parent"
108 + android:layout_weight="1"
109 + android:gravity="center"
110 + android:orientation="vertical">
111 +
112 + <Button
113 + android:id="@+id/img_activity"
114 + android:layout_width="58dp"
115 + android:layout_height="58dp"
116 + android:layout_gravity="center"
117 + android:background="@drawable/btn_plus"
118 + app:layout_constraintEnd_toEndOf="parent"
119 + app:layout_constraintStart_toStartOf="parent"
120 + app:layout_constraintBottom_toBottomOf="parent"
121 + android:layout_marginBottom="15dp"
122 + tools:layout_editor_absoluteY="92dp" />
123 +
124 + </androidx.constraintlayout.widget.ConstraintLayout>
125 + </LinearLayout>
126 +
127 + </LinearLayout>
128 + </LinearLayout>
129 +
    <androidx.coordinatorlayout.widget.CoordinatorLayout
        android:id="@+id/coordinatorLayout"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintStart_toStartOf="parent">
137 +
138 + <com.google.android.material.bottomappbar.BottomAppBar
139 + android:id="@+id/bottom_app_bar"
140 + android:layout_width="match_parent"
141 + android:layout_height="wrap_content"
142 + android:layout_gravity="bottom"
143 + app:backgroundTint="#303F9F"
144 + app:fabAlignmentMode="center" />
145 +
146 + </androidx.coordinatorlayout.widget.CoordinatorLayout>
147 +</androidx.constraintlayout.widget.ConstraintLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + xmlns:app="http://schemas.android.com/apk/res-auto"
4 + xmlns:tools="http://schemas.android.com/tools"
5 + android:layout_width="match_parent"
6 + android:layout_height="match_parent"
7 + tools:context=".RecordActivity">
8 + <TextureView
9 + android:id="@+id/textureView1"
10 + android:layout_width="wrap_content"
11 + android:layout_height="wrap_content"
12 + android:layout_alignParentTop="true"
13 + android:layout_centerHorizontal="true" />
14 +
15 +
16 +
17 +</RelativeLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + android:orientation="vertical"
4 + android:layout_width="300dp"
5 + android:layout_height="230dp">
6 +
    <TextView
        android:layout_width="match_parent"
        android:layout_height="64dp"
        android:layout_gravity="center"
        android:background="#193B81"
        android:textColor="#FFFFFF"
        android:fontFamily="@font/nanumsquarer"
        android:text="이미지 업로드"
        android:gravity="center"
        android:textSize="20sp"
        />
18 +
19 +
20 +</LinearLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + android:layout_width="match_parent"
4 + android:layout_height="wrap_content"
5 + android:orientation="vertical"
6 + android:paddingTop="20dp"
7 + android:paddingRight="23dp"
8 + android:paddingLeft="23dp">
9 + <LinearLayout
10 + android:layout_width="match_parent"
11 + android:layout_height="wrap_content"
12 + android:orientation="horizontal"
13 + android:padding="5dp">
14 + <ImageView
15 + android:id="@+id/img_source"
16 + android:layout_width="150dp"
17 + android:layout_height="150dp"/>
18 + <LinearLayout
19 + android:layout_marginLeft="3dp"
20 + android:layout_width="match_parent"
21 + android:layout_height="wrap_content"
22 + android:layout_marginTop="60dp"
23 + android:orientation="horizontal"
24 + android:layout_marginStart="3dp">
            <TextView
                android:id="@+id/list_file_name"
                android:layout_width="match_parent"
                android:layout_height="match_parent"
                android:gravity="center"
                android:text="권주희"
                android:textColor="#000000"
                android:textSize="20sp"
                />
34 + </LinearLayout>
35 + </LinearLayout>
36 +
37 +
38 +</LinearLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 + <background android:drawable="@drawable/ic_launcher_background" />
4 + <foreground android:drawable="@drawable/ic_launcher_foreground" />
5 +</adaptive-icon>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 + <background android:drawable="@drawable/ic_launcher_background" />
4 + <foreground android:drawable="@drawable/ic_launcher_foreground" />
5 +</adaptive-icon>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<resources>
3 + <color name="colorPrimary">#6200EE</color>
4 + <color name="colorPrimaryDark">#3700B3</color>
5 + <color name="colorAccent">#03DAC5</color>
6 +</resources>
1 +<resources>
2 + <string name="app_name">CCTedV</string>
3 +</resources>
1 +<resources>
2 +
3 + <!-- Base application theme. -->
4 + <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
5 + <!-- Customize your theme here. -->
6 + <item name="colorPrimary">@color/colorPrimary</item>
7 + <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
8 + <item name="colorAccent">@color/colorAccent</item>
9 + </style>
10 +
11 +</resources>
1 +package com.example.cctedv;
2 +
3 +import org.junit.Test;
4 +
5 +import static org.junit.Assert.*;
6 +
7 +/**
8 + * Example local unit test, which will execute on the development machine (host).
9 + *
10 + * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
11 + */
12 +public class ExampleUnitTest {
13 + @Test
14 + public void addition_isCorrect() {
15 + assertEquals(4, 2 + 2);
16 + }
17 +}
...\ No newline at end of file ...\ No newline at end of file
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {

    repositories {
        google()
        // NOTE(review): JCenter was sunset in 2021 and no longer accepts or reliably
        // serves artifacts. Maven Central is added as a fallback so dependency
        // resolution keeps working; jcenter() is kept for any legacy-only artifacts.
        jcenter()
        mavenCentral()

    }
    dependencies {
        // Android Gradle plugin shared by all modules in this build.
        classpath 'com.android.tools.build:gradle:3.6.3'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        // Same fallback as above: prefer Maven Central now that JCenter is sunset.
        jcenter()
        mavenCentral()

    }
}

// Deletes the root build directory on `gradle clean`.
task clean(type: Delete) {
    delete rootProject.buildDir
}
1 +# Project-wide Gradle settings.
2 +# IDE (e.g. Android Studio) users:
3 +# Gradle settings configured through the IDE *will override*
4 +# any settings specified in this file.
5 +# For more details on how to configure your build environment visit
6 +# http://www.gradle.org/docs/current/userguide/build_environment.html
7 +# Specifies the JVM arguments used for the daemon process.
8 +# The setting is particularly useful for tweaking memory settings.
9 +org.gradle.jvmargs=-Xmx1536m
10 +# When configured, Gradle will run in incubating parallel mode.
11 +# This option should only be used with decoupled projects. More details, visit
12 +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 +# org.gradle.parallel=true
14 +# AndroidX package structure to make it clearer which packages are bundled with the
15 +# Android operating system, and which are packaged with your app's APK
16 +# https://developer.android.com/topic/libraries/support-library/androidx-rn
17 +android.useAndroidX=true
18 +# Automatically convert third-party libraries to use AndroidX
19 +android.enableJetifier=true
20 +
1 +#Thu Apr 23 16:51:13 KST 2020
2 +distributionBase=GRADLE_USER_HOME
3 +distributionPath=wrapper/dists
4 +zipStoreBase=GRADLE_USER_HOME
5 +zipStorePath=wrapper/dists
6 +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
1 +#!/usr/bin/env sh
2 +
3 +##############################################################################
4 +##
5 +## Gradle start up script for UN*X
6 +##
7 +##############################################################################
8 +
9 +# Attempt to set APP_HOME
10 +# Resolve links: $0 may be a link
11 +PRG="$0"
12 +# Need this for relative symlinks.
13 +while [ -h "$PRG" ] ; do
14 + ls=`ls -ld "$PRG"`
15 + link=`expr "$ls" : '.*-> \(.*\)$'`
16 + if expr "$link" : '/.*' > /dev/null; then
17 + PRG="$link"
18 + else
19 + PRG=`dirname "$PRG"`"/$link"
20 + fi
21 +done
22 +SAVED="`pwd`"
23 +cd "`dirname \"$PRG\"`/" >/dev/null
24 +APP_HOME="`pwd -P`"
25 +cd "$SAVED" >/dev/null
26 +
27 +APP_NAME="Gradle"
28 +APP_BASE_NAME=`basename "$0"`
29 +
30 +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 +DEFAULT_JVM_OPTS=""
32 +
33 +# Use the maximum available, or set MAX_FD != -1 to use that value.
34 +MAX_FD="maximum"
35 +
36 +warn () {
37 + echo "$*"
38 +}
39 +
40 +die () {
41 + echo
42 + echo "$*"
43 + echo
44 + exit 1
45 +}
46 +
47 +# OS specific support (must be 'true' or 'false').
48 +cygwin=false
49 +msys=false
50 +darwin=false
51 +nonstop=false
52 +case "`uname`" in
53 + CYGWIN* )
54 + cygwin=true
55 + ;;
56 + Darwin* )
57 + darwin=true
58 + ;;
59 + MINGW* )
60 + msys=true
61 + ;;
62 + NONSTOP* )
63 + nonstop=true
64 + ;;
65 +esac
66 +
67 +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 +
69 +# Determine the Java command to use to start the JVM.
70 +if [ -n "$JAVA_HOME" ] ; then
71 + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 + # IBM's JDK on AIX uses strange locations for the executables
73 + JAVACMD="$JAVA_HOME/jre/sh/java"
74 + else
75 + JAVACMD="$JAVA_HOME/bin/java"
76 + fi
77 + if [ ! -x "$JAVACMD" ] ; then
78 + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 +
80 +Please set the JAVA_HOME variable in your environment to match the
81 +location of your Java installation."
82 + fi
83 +else
84 + JAVACMD="java"
85 + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 +
87 +Please set the JAVA_HOME variable in your environment to match the
88 +location of your Java installation."
89 +fi
90 +
91 +# Increase the maximum file descriptors if we can.
92 +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 + MAX_FD_LIMIT=`ulimit -H -n`
94 + if [ $? -eq 0 ] ; then
95 + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 + MAX_FD="$MAX_FD_LIMIT"
97 + fi
98 + ulimit -n $MAX_FD
99 + if [ $? -ne 0 ] ; then
100 + warn "Could not set maximum file descriptor limit: $MAX_FD"
101 + fi
102 + else
103 + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 + fi
105 +fi
106 +
107 +# For Darwin, add options to specify how the application appears in the dock
108 +if $darwin; then
109 + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 +fi
111 +
112 +# For Cygwin, switch paths to Windows format before running java
113 +if $cygwin ; then
114 + APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 + JAVACMD=`cygpath --unix "$JAVACMD"`
117 +
118 + # We build the pattern for arguments to be converted via cygpath
119 + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 + SEP=""
121 + for dir in $ROOTDIRSRAW ; do
122 + ROOTDIRS="$ROOTDIRS$SEP$dir"
123 + SEP="|"
124 + done
125 + OURCYGPATTERN="(^($ROOTDIRS))"
126 + # Add a user-defined pattern to the cygpath arguments
127 + if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 + fi
130 + # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 + i=0
132 + for arg in "$@" ; do
133 + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 +
136 + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 + else
139 + eval `echo args$i`="\"$arg\""
140 + fi
141 + i=$((i+1))
142 + done
143 + case $i in
144 + (0) set -- ;;
145 + (1) set -- "$args0" ;;
146 + (2) set -- "$args0" "$args1" ;;
147 + (3) set -- "$args0" "$args1" "$args2" ;;
148 + (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 + esac
155 +fi
156 +
157 +# Escape application args
158 +save () {
159 + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 + echo " "
161 +}
162 +APP_ARGS=$(save "$@")
163 +
164 +# Collect all arguments for the java command, following the shell quoting and substitution rules
165 +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 +
167 +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 + cd "$(dirname "$0")"
170 +fi
171 +
172 +exec "$JAVACMD" "$@"
1 +@if "%DEBUG%" == "" @echo off
2 +@rem ##########################################################################
3 +@rem
4 +@rem Gradle startup script for Windows
5 +@rem
6 +@rem ##########################################################################
7 +
8 +@rem Set local scope for the variables with windows NT shell
9 +if "%OS%"=="Windows_NT" setlocal
10 +
11 +set DIRNAME=%~dp0
12 +if "%DIRNAME%" == "" set DIRNAME=.
13 +set APP_BASE_NAME=%~n0
14 +set APP_HOME=%DIRNAME%
15 +
16 +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 +set DEFAULT_JVM_OPTS=
18 +
19 +@rem Find java.exe
20 +if defined JAVA_HOME goto findJavaFromJavaHome
21 +
22 +set JAVA_EXE=java.exe
23 +%JAVA_EXE% -version >NUL 2>&1
24 +if "%ERRORLEVEL%" == "0" goto init
25 +
26 +echo.
27 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 +echo.
29 +echo Please set the JAVA_HOME variable in your environment to match the
30 +echo location of your Java installation.
31 +
32 +goto fail
33 +
34 +:findJavaFromJavaHome
35 +set JAVA_HOME=%JAVA_HOME:"=%
36 +set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 +
38 +if exist "%JAVA_EXE%" goto init
39 +
40 +echo.
41 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 +echo.
43 +echo Please set the JAVA_HOME variable in your environment to match the
44 +echo location of your Java installation.
45 +
46 +goto fail
47 +
48 +:init
49 +@rem Get command-line arguments, handling Windows variants
50 +
51 +if not "%OS%" == "Windows_NT" goto win9xME_args
52 +
53 +:win9xME_args
54 +@rem Slurp the command line arguments.
55 +set CMD_LINE_ARGS=
56 +set _SKIP=2
57 +
58 +:win9xME_args_slurp
59 +if "x%~1" == "x" goto execute
60 +
61 +set CMD_LINE_ARGS=%*
62 +
63 +:execute
64 +@rem Setup the command line
65 +
66 +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 +
68 +@rem Execute Gradle
69 +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 +
71 +:end
72 +@rem End local scope for the variables with windows NT shell
73 +if "%ERRORLEVEL%"=="0" goto mainEnd
74 +
75 +:fail
76 +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 +rem the _cmd.exe /c_ return code!
78 +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 +exit /b 1
80 +
81 +:mainEnd
82 +if "%OS%"=="Windows_NT" endlocal
83 +
84 +:omega
1 +ECLIPSE ANDROID PROJECT IMPORT SUMMARY
2 +======================================
3 +
4 +Ignored Files:
5 +--------------
6 +The following files were *not* copied into the new Gradle project; you
7 +should evaluate whether these are still needed in your project and if
8 +so manually move them:
9 +
10 +* .gitignore
11 +* LICENSE
12 +* README.md
13 +* build.xml
14 +* doc/
15 +* doc/allclasses-frame.html
16 +* doc/allclasses-noframe.html
17 +* doc/constant-values.html
18 +* doc/deprecated-list.html
19 +* doc/help-doc.html
20 +* doc/index-all.html
21 +* doc/index.html
22 +* doc/net/
23 +* doc/net/majorkernelpanic/
24 +* doc/net/majorkernelpanic/streaming/
25 +* doc/net/majorkernelpanic/streaming/MediaStream.html
26 +* doc/net/majorkernelpanic/streaming/Session.Callback.html
27 +* doc/net/majorkernelpanic/streaming/Session.html
28 +* doc/net/majorkernelpanic/streaming/SessionBuilder.html
29 +* doc/net/majorkernelpanic/streaming/Stream.html
30 +* doc/net/majorkernelpanic/streaming/audio/
31 +* doc/net/majorkernelpanic/streaming/audio/AACStream.html
32 +* doc/net/majorkernelpanic/streaming/audio/AMRNBStream.html
33 +* doc/net/majorkernelpanic/streaming/audio/AudioQuality.html
34 +* doc/net/majorkernelpanic/streaming/audio/AudioStream.html
35 +* doc/net/majorkernelpanic/streaming/audio/package-frame.html
36 +* doc/net/majorkernelpanic/streaming/audio/package-summary.html
37 +* doc/net/majorkernelpanic/streaming/audio/package-tree.html
38 +* doc/net/majorkernelpanic/streaming/exceptions/
39 +* doc/net/majorkernelpanic/streaming/exceptions/CameraInUseException.html
40 +* doc/net/majorkernelpanic/streaming/exceptions/ConfNotSupportedException.html
41 +* doc/net/majorkernelpanic/streaming/exceptions/InvalidSurfaceException.html
42 +* doc/net/majorkernelpanic/streaming/exceptions/StorageUnavailableException.html
43 +* doc/net/majorkernelpanic/streaming/exceptions/package-frame.html
44 +* doc/net/majorkernelpanic/streaming/exceptions/package-summary.html
45 +* doc/net/majorkernelpanic/streaming/exceptions/package-tree.html
46 +* doc/net/majorkernelpanic/streaming/gl/
47 +* doc/net/majorkernelpanic/streaming/gl/SurfaceManager.html
48 +* doc/net/majorkernelpanic/streaming/gl/SurfaceView.ViewAspectRatioMeasurer.html
49 +* doc/net/majorkernelpanic/streaming/gl/SurfaceView.html
50 +* doc/net/majorkernelpanic/streaming/gl/TextureManager.html
51 +* doc/net/majorkernelpanic/streaming/gl/package-frame.html
52 +* doc/net/majorkernelpanic/streaming/gl/package-summary.html
53 +* doc/net/majorkernelpanic/streaming/gl/package-tree.html
54 +* doc/net/majorkernelpanic/streaming/hw/
55 +* doc/net/majorkernelpanic/streaming/hw/CodecManager.html
56 +* doc/net/majorkernelpanic/streaming/hw/EncoderDebugger.html
57 +* doc/net/majorkernelpanic/streaming/hw/NV21Convertor.html
58 +* doc/net/majorkernelpanic/streaming/hw/package-frame.html
59 +* doc/net/majorkernelpanic/streaming/hw/package-summary.html
60 +* doc/net/majorkernelpanic/streaming/hw/package-tree.html
61 +* doc/net/majorkernelpanic/streaming/mp4/
62 +* doc/net/majorkernelpanic/streaming/mp4/MP4Config.html
63 +* doc/net/majorkernelpanic/streaming/mp4/MP4Parser.html
64 +* doc/net/majorkernelpanic/streaming/mp4/package-frame.html
65 +* doc/net/majorkernelpanic/streaming/mp4/package-summary.html
66 +* doc/net/majorkernelpanic/streaming/mp4/package-tree.html
67 +* doc/net/majorkernelpanic/streaming/package-frame.html
68 +* doc/net/majorkernelpanic/streaming/package-summary.html
69 +* doc/net/majorkernelpanic/streaming/package-tree.html
70 +* doc/net/majorkernelpanic/streaming/rtcp/
71 +* doc/net/majorkernelpanic/streaming/rtcp/SenderReport.html
72 +* doc/net/majorkernelpanic/streaming/rtcp/package-frame.html
73 +* doc/net/majorkernelpanic/streaming/rtcp/package-summary.html
74 +* doc/net/majorkernelpanic/streaming/rtcp/package-tree.html
75 +* doc/net/majorkernelpanic/streaming/rtp/
76 +* doc/net/majorkernelpanic/streaming/rtp/AACADTSPacketizer.html
77 +* doc/net/majorkernelpanic/streaming/rtp/AACLATMPacketizer.html
78 +* doc/net/majorkernelpanic/streaming/rtp/AMRNBPacketizer.html
79 +* doc/net/majorkernelpanic/streaming/rtp/AbstractPacketizer.html
80 +* doc/net/majorkernelpanic/streaming/rtp/H263Packetizer.html
81 +* doc/net/majorkernelpanic/streaming/rtp/H264Packetizer.html
82 +* doc/net/majorkernelpanic/streaming/rtp/MediaCodecInputStream.html
83 +* doc/net/majorkernelpanic/streaming/rtp/RtpSocket.html
84 +* doc/net/majorkernelpanic/streaming/rtp/package-frame.html
85 +* doc/net/majorkernelpanic/streaming/rtp/package-summary.html
86 +* doc/net/majorkernelpanic/streaming/rtp/package-tree.html
87 +* doc/net/majorkernelpanic/streaming/rtsp/
88 +* doc/net/majorkernelpanic/streaming/rtsp/RtspClient.Callback.html
89 +* doc/net/majorkernelpanic/streaming/rtsp/RtspClient.html
90 +* doc/net/majorkernelpanic/streaming/rtsp/RtspServer.CallbackListener.html
91 +* doc/net/majorkernelpanic/streaming/rtsp/RtspServer.LocalBinder.html
92 +* doc/net/majorkernelpanic/streaming/rtsp/RtspServer.html
93 +* doc/net/majorkernelpanic/streaming/rtsp/UriParser.html
94 +* doc/net/majorkernelpanic/streaming/rtsp/package-frame.html
95 +* doc/net/majorkernelpanic/streaming/rtsp/package-summary.html
96 +* doc/net/majorkernelpanic/streaming/rtsp/package-tree.html
97 +* doc/net/majorkernelpanic/streaming/video/
98 +* doc/net/majorkernelpanic/streaming/video/CodecManager.html
99 +* doc/net/majorkernelpanic/streaming/video/H263Stream.html
100 +* doc/net/majorkernelpanic/streaming/video/H264Stream.html
101 +* doc/net/majorkernelpanic/streaming/video/VideoQuality.html
102 +* doc/net/majorkernelpanic/streaming/video/VideoStream.html
103 +* doc/net/majorkernelpanic/streaming/video/package-frame.html
104 +* doc/net/majorkernelpanic/streaming/video/package-summary.html
105 +* doc/net/majorkernelpanic/streaming/video/package-tree.html
106 +* doc/overview-frame.html
107 +* doc/overview-summary.html
108 +* doc/overview-tree.html
109 +* doc/package-list
110 +* doc/resources/
111 +* doc/resources/background.gif
112 +* doc/resources/tab.gif
113 +* doc/resources/titlebar.gif
114 +* doc/resources/titlebar_end.gif
115 +* doc/serialized-form.html
116 +* doc/stylesheet.css
117 +* pom.xml
118 +* proguard-project.txt
119 +
120 +Moved Files:
121 +------------
122 +Android Gradle projects use a different directory structure than ADT
123 +Eclipse projects. Here's how the projects were restructured:
124 +
125 +* AndroidManifest.xml => libstreaming/src/main/AndroidManifest.xml
126 +* res/ => libstreaming/src/main/res/
127 +* src/ => libstreaming/src/main/java/
128 +
129 +Next Steps:
130 +-----------
131 +You can now build the project. The Gradle project needs network
132 +connectivity to download dependencies.
133 +
134 +Bugs:
135 +-----
136 +If for some reason your project does not build, and you determine that
137 +it is due to a bug or limitation of the Eclipse to Gradle importer,
138 +please file a bug at http://b.android.com with category
139 +Component-Tools.
140 +
141 +(This import summary is for your information only, and can be deleted
142 +after import once you are satisfied with the results.)
// Build configuration for the :libstreaming library module, consumed by the app
// module via `implementation project(path: ':libstreaming')`.
apply plugin: 'com.android.library'

android {
    // Matches the app module's compileSdkVersion (28) so both modules build
    // against the same platform APIs.
    compileSdkVersion 28
    buildToolsVersion "28.0.3"

    defaultConfig {
        minSdkVersion 22
        targetSdkVersion 28
    }

    buildTypes {
        release {
            // Code shrinking is disabled; the ProGuard files are kept so rules
            // are already in place if minification is turned on later.
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
        }
    }
}
1 +/**
2 + * Automatically generated file. DO NOT MODIFY
3 + */
4 +package net.majorkernelpanic.streaming;
5 +
// NOTE(review): this is an AGP-generated build artifact (it lives under the
// module's build/ output). Generated files should not be committed to version
// control — add the build/ directory to .gitignore so this file is recreated
// by each build instead of tracked in git.
public final class BuildConfig {
  public static final boolean DEBUG = Boolean.parseBoolean("true");
  public static final String LIBRARY_PACKAGE_NAME = "net.majorkernelpanic.streaming";
  /**
   * @deprecated APPLICATION_ID is misleading in libraries. For the library package name use LIBRARY_PACKAGE_NAME
   */
  @Deprecated
  public static final String APPLICATION_ID = "net.majorkernelpanic.streaming";
  public static final String BUILD_TYPE = "debug";
  public static final String FLAVOR = "";
  public static final int VERSION_CODE = 40;
  public static final String VERSION_NAME = "4.0";
}
1 +/**
2 + * Automatically generated file. DO NOT MODIFY
3 + */
4 +package net.majorkernelpanic.streaming;
5 +
// NOTE(review): duplicate copy of the AGP-generated BuildConfig (build/ output
// committed twice in this change set). Generated files should not be committed
// to version control — add the build/ directory to .gitignore so they are
// recreated by each build instead of tracked in git.
public final class BuildConfig {
  public static final boolean DEBUG = Boolean.parseBoolean("true");
  public static final String LIBRARY_PACKAGE_NAME = "net.majorkernelpanic.streaming";
  /**
   * @deprecated APPLICATION_ID is misleading in libraries. For the library package name use LIBRARY_PACKAGE_NAME
   */
  @Deprecated
  public static final String APPLICATION_ID = "net.majorkernelpanic.streaming";
  public static final String BUILD_TYPE = "debug";
  public static final String FLAVOR = "";
  public static final int VERSION_CODE = 40;
  public static final String VERSION_NAME = "4.0";
}
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 + package="net.majorkernelpanic.streaming"
4 + android:versionCode="40"
5 + android:versionName="4.0" >
6 +
7 + <uses-sdk
8 + android:minSdkVersion="22"
9 + android:targetSdkVersion="28" />
10 +
11 +</manifest>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 + package="net.majorkernelpanic.streaming"
4 + android:versionCode="40"
5 + android:versionName="4.0" >
6 +
7 + <uses-sdk
8 + android:minSdkVersion="22"
9 + android:targetSdkVersion="28" />
10 +
11 +</manifest>
...\ No newline at end of file ...\ No newline at end of file
1 +[{"outputType":{"type":"AAPT_FRIENDLY_MERGED_MANIFESTS"},"apkData":{"type":"MAIN","splits":[],"versionCode":40,"versionName":"4.0","enabled":true,"outputFile":"libstreaming-debug.aar","fullName":"debug","baseName":"debug","dirName":""},"path":"AndroidManifest.xml","properties":{"packageId":"net.majorkernelpanic.streaming","split":""}}]
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/jniLibs"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/jniLibs"/></dataSet></merger>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/jniLibs"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/jniLibs"/></dataSet></merger>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/shaders"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/shaders"/></dataSet></merger>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/shaders"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/shaders"/></dataSet></merger>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/assets"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/intermediates/shader_assets/debug/out"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/assets"/></dataSet></merger>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/assets"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/intermediates/shader_assets/debug/out"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/assets"/></dataSet></merger>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<merger version="3"><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="main$Generated" generated="true" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/res"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/generated/res/rs/debug"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/generated/res/resValues/debug"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="main" generated-set="main$Generated" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/res"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/generated/res/rs/debug"/><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/build/generated/res/resValues/debug"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="debug$Generated" generated="true" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/res"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="debug" generated-set="debug$Generated" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/debug/res"/></dataSet><mergedItems/></merger>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 + package="net.majorkernelpanic.streaming"
4 + android:versionCode="40"
5 + android:versionName="4.0" >
6 +
7 + <uses-sdk
8 + android:minSdkVersion="22"
9 + android:targetSdkVersion="28" />
10 +
11 +</manifest>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 + package="net.majorkernelpanic.streaming"
4 + android:versionCode="40"
5 + android:versionName="4.0" >
6 +
7 + <uses-sdk
8 + android:minSdkVersion="22"
9 + android:targetSdkVersion="28" />
10 +
11 +</manifest>
...\ No newline at end of file ...\ No newline at end of file
1 +R_DEF: Internal format may change without notice
2 +local
1 +1<?xml version="1.0" encoding="utf-8"?>
2 +2<manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 +3 package="net.majorkernelpanic.streaming"
4 +4 android:versionCode="40"
5 +5 android:versionName="4.0" >
6 +6
7 +7 <uses-sdk
8 +7-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
9 +8 android:minSdkVersion="22"
10 +8-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
11 +9 android:targetSdkVersion="28" />
12 +9-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:7:9-38
13 +10
14 +11</manifest>
1 +1<?xml version="1.0" encoding="utf-8"?>
2 +2<manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 +3 package="net.majorkernelpanic.streaming"
4 +4 android:versionCode="40"
5 +5 android:versionName="4.0" >
6 +6
7 +7 <uses-sdk
8 +7-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
9 +8 android:minSdkVersion="22"
10 +8-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
11 +9 android:targetSdkVersion="28" />
12 +9-->/Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:7:9-38
13 +10
14 +11</manifest>
1 +[{"outputType":{"type":"MERGED_MANIFESTS"},"apkData":{"type":"MAIN","splits":[],"versionCode":40,"versionName":"4.0","enabled":true,"outputFile":"libstreaming-debug.aar","fullName":"debug","baseName":"debug","dirName":""},"path":"../../library_manifest/debug/AndroidManifest.xml","properties":{"packageId":"net.majorkernelpanic.streaming","split":""}}]
...\ No newline at end of file ...\ No newline at end of file
1 +[{"outputType":{"type":"MERGED_MANIFESTS"},"apkData":{"type":"MAIN","splits":[],"versionCode":40,"versionName":"4.0","enabled":true,"outputFile":"libstreaming-debug.aar","fullName":"debug","baseName":"debug","dirName":""},"path":"../../library_manifest/debug/AndroidManifest.xml","properties":{"packageId":"net.majorkernelpanic.streaming","split":""}}]
...\ No newline at end of file ...\ No newline at end of file
1 +[]
...\ No newline at end of file ...\ No newline at end of file
1 +[]
...\ No newline at end of file ...\ No newline at end of file
1 +-- Merging decision tree log ---
2 +manifest
3 +ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
4 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
5 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
6 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
7 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
8 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
9 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
10 + package
11 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:2:5-45
12 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
13 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
14 + android:versionName
15 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:4:5-30
16 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
17 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
18 + xmlns:android
19 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:11-69
20 + android:versionCode
21 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:3:5-29
22 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
23 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
24 +uses-sdk
25 +ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
26 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
27 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
28 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
29 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
30 + android:targetSdkVersion
31 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:7:9-38
32 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
33 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
34 + android:minSdkVersion
35 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
36 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
37 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
1 +-- Merging decision tree log ---
2 +manifest
3 +ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
4 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
5 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
6 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
7 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
8 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
9 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:1-9:12
10 + package
11 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:2:5-45
12 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
13 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
14 + android:versionName
15 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:4:5-30
16 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
17 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
18 + xmlns:android
19 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:1:11-69
20 + android:versionCode
21 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:3:5-29
22 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
23 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
24 +uses-sdk
25 +ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
26 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
27 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
28 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
29 +INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
30 + android:targetSdkVersion
31 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:7:9-38
32 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
33 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
34 + android:minSdkVersion
35 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
36 + ADDED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml:6:5-7:41
37 + INJECTED from /Users/gwonjoohee/Desktop/CCTedV/libstreaming/src/main/AndroidManifest.xml
1 +<manifest xmlns:android="http://schemas.android.com/apk/res/android"
2 + package="net.majorkernelpanic.streaming"
3 + android:versionCode="40"
4 + android:versionName="4.0" >
5 +
6 + <uses-sdk
7 + android:targetSdkVersion="19" />
8 +
9 +</manifest>
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming;
20 +
21 +import java.io.IOException;
22 +import java.io.OutputStream;
23 +import java.net.InetAddress;
24 +import java.util.Random;
25 +import net.majorkernelpanic.streaming.audio.AudioStream;
26 +import net.majorkernelpanic.streaming.rtp.AbstractPacketizer;
27 +import net.majorkernelpanic.streaming.video.VideoStream;
28 +import android.annotation.SuppressLint;
29 +import android.media.MediaCodec;
30 +import android.media.MediaRecorder;
31 +import android.net.LocalServerSocket;
32 +import android.net.LocalSocket;
33 +import android.net.LocalSocketAddress;
34 +import android.os.Build;
35 +import android.os.ParcelFileDescriptor;
36 +import android.util.Log;
37 +
38 +/**
39 + * A MediaRecorder that streams what it records using a packetizer from the RTP package.
40 + * You can't use this class directly !
41 + */
42 +public abstract class MediaStream implements Stream {
43 +
44 + protected static final String TAG = "MediaStream";
45 +
46 + /** Raw audio/video will be encoded using the MediaRecorder API. */
47 + public static final byte MODE_MEDIARECORDER_API = 0x01;
48 +
49 + /** Raw audio/video will be encoded using the MediaCodec API with buffers. */
50 + public static final byte MODE_MEDIACODEC_API = 0x02;
51 +
52 + /** Raw audio/video will be encoded using the MediaCode API with a surface. */
53 + public static final byte MODE_MEDIACODEC_API_2 = 0x05;
54 +
55 + /** A LocalSocket will be used to feed the MediaRecorder object */
56 + public static final byte PIPE_API_LS = 0x01;
57 +
58 + /** A ParcelFileDescriptor will be used to feed the MediaRecorder object */
59 + public static final byte PIPE_API_PFD = 0x02;
60 +
61 + /** Prefix that will be used for all shared preferences saved by libstreaming */
62 + protected static final String PREF_PREFIX = "libstreaming-";
63 +
64 + /** The packetizer that will read the output of the camera and send RTP packets over the networked. */
65 + protected AbstractPacketizer mPacketizer = null;
66 +
67 + protected static byte sSuggestedMode = MODE_MEDIARECORDER_API;
68 + protected byte mMode, mRequestedMode;
69 +
70 + /**
71 + * Starting lollipop the LocalSocket API cannot be used to feed a MediaRecorder object.
72 + * You can force what API to use to create the pipe that feeds it with this constant
73 + * by using {@link #PIPE_API_LS} and {@link #PIPE_API_PFD}.
74 + */
75 + protected final static byte sPipeApi;
76 +
77 + protected boolean mStreaming = false, mConfigured = false;
78 + protected int mRtpPort = 0, mRtcpPort = 0;
79 + protected byte mChannelIdentifier = 0;
80 + protected OutputStream mOutputStream = null;
81 + protected InetAddress mDestination;
82 +
83 + protected ParcelFileDescriptor[] mParcelFileDescriptors;
84 + protected ParcelFileDescriptor mParcelRead;
85 + protected ParcelFileDescriptor mParcelWrite;
86 +
87 + protected LocalSocket mReceiver, mSender = null;
88 + private LocalServerSocket mLss = null;
89 + private int mSocketId;
90 +
91 + private int mTTL = 64;
92 +
93 + protected MediaRecorder mMediaRecorder;
94 + protected MediaCodec mMediaCodec;
95 +
96 + static {
97 + // We determine whether or not the MediaCodec API should be used
98 + try {
99 + Class.forName("android.media.MediaCodec");
100 + // Will be set to MODE_MEDIACODEC_API at some point...
101 + sSuggestedMode = MODE_MEDIACODEC_API;
102 + Log.i(TAG,"Phone supports the MediaCoded API");
103 + } catch (ClassNotFoundException e) {
104 + sSuggestedMode = MODE_MEDIARECORDER_API;
105 + Log.i(TAG,"Phone does not support the MediaCodec API");
106 + }
107 +
108 + // Starting lollipop, the LocalSocket API cannot be used anymore to feed
109 + // a MediaRecorder object for security reasons
110 + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.KITKAT_WATCH) {
111 + sPipeApi = PIPE_API_PFD;
112 + } else {
113 + sPipeApi = PIPE_API_LS;
114 + }
115 + }
116 +
117 + public MediaStream() {
118 + mRequestedMode = sSuggestedMode;
119 + mMode = sSuggestedMode;
120 + }
121 +
122 + /**
123 + * Sets the destination IP address of the stream.
124 + * @param dest The destination address of the stream
125 + */
126 + public void setDestinationAddress(InetAddress dest) {
127 + mDestination = dest;
128 + }
129 +
130 + /**
131 + * Sets the destination ports of the stream.
132 + * If an odd number is supplied for the destination port then the next
133 + * lower even number will be used for RTP and it will be used for RTCP.
134 + * If an even number is supplied, it will be used for RTP and the next odd
135 + * number will be used for RTCP.
136 + * @param dport The destination port
137 + */
138 + public void setDestinationPorts(int dport) {
139 + if (dport % 2 == 1) {
140 + mRtpPort = dport-1;
141 + mRtcpPort = dport;
142 + } else {
143 + mRtpPort = dport;
144 + mRtcpPort = dport+1;
145 + }
146 + }
147 +
148 + /**
149 + * Sets the destination ports of the stream.
150 + * @param rtpPort Destination port that will be used for RTP
151 + * @param rtcpPort Destination port that will be used for RTCP
152 + */
153 + public void setDestinationPorts(int rtpPort, int rtcpPort) {
154 + mRtpPort = rtpPort;
155 + mRtcpPort = rtcpPort;
156 + mOutputStream = null;
157 + }
158 +
159 + /**
160 + * If a TCP is used as the transport protocol for the RTP session,
161 + * the output stream to which RTP packets will be written to must
162 + * be specified with this method.
163 + */
164 + public void setOutputStream(OutputStream stream, byte channelIdentifier) {
165 + mOutputStream = stream;
166 + mChannelIdentifier = channelIdentifier;
167 + }
168 +
169 +
170 + /**
171 + * Sets the Time To Live of packets sent over the network.
172 + * @param ttl The time to live
173 + * @throws IOException
174 + */
175 + public void setTimeToLive(int ttl) throws IOException {
176 + mTTL = ttl;
177 + }
178 +
179 + /**
180 + * Returns a pair of destination ports, the first one is the
181 + * one used for RTP and the second one is used for RTCP.
182 + **/
183 + public int[] getDestinationPorts() {
184 + return new int[] {
185 + mRtpPort,
186 + mRtcpPort
187 + };
188 + }
189 +
190 + /**
191 + * Returns a pair of source ports, the first one is the
192 + * one used for RTP and the second one is used for RTCP.
193 + **/
194 + public int[] getLocalPorts() {
195 + return mPacketizer.getRtpSocket().getLocalPorts();
196 + }
197 +
198 + /**
199 + * Sets the streaming method that will be used.
200 + *
201 + * If the mode is set to {@link #MODE_MEDIARECORDER_API}, raw audio/video will be encoded
202 + * using the MediaRecorder API. <br />
203 + *
204 + * If the mode is set to {@link #MODE_MEDIACODEC_API} or to {@link #MODE_MEDIACODEC_API_2},
205 + * audio/video will be encoded with using the MediaCodec. <br />
206 + *
207 + * The {@link #MODE_MEDIACODEC_API_2} mode only concerns {@link VideoStream}, it makes
208 + * use of the createInputSurface() method of the MediaCodec API (Android 4.3 is needed there). <br />
209 + *
210 + * @param mode Can be {@link #MODE_MEDIARECORDER_API}, {@link #MODE_MEDIACODEC_API} or {@link #MODE_MEDIACODEC_API_2}
211 + */
212 + public void setStreamingMethod(byte mode) {
213 + mRequestedMode = mode;
214 + }
215 +
216 + /**
217 + * Returns the streaming method in use, call this after
218 + * {@link #configure()} to get an accurate response.
219 + */
220 + public byte getStreamingMethod() {
221 + return mMode;
222 + }
223 +
224 + /**
225 + * Returns the packetizer associated with the {@link MediaStream}.
226 + * @return The packetizer
227 + */
228 + public AbstractPacketizer getPacketizer() {
229 + return mPacketizer;
230 + }
231 +
232 + /**
233 + * Returns an approximation of the bit rate consumed by the stream in bit per seconde.
234 + */
235 + public long getBitrate() {
236 + return !mStreaming ? 0 : mPacketizer.getRtpSocket().getBitrate();
237 + }
238 +
239 + /**
240 + * Indicates if the {@link MediaStream} is streaming.
241 + * @return A boolean indicating if the {@link MediaStream} is streaming
242 + */
243 + public boolean isStreaming() {
244 + return mStreaming;
245 + }
246 +
247 + /**
248 + * Configures the stream with the settings supplied with
249 + * {@link VideoStream#setVideoQuality(net.majorkernelpanic.streaming.video.VideoQuality)}
250 + * for a {@link VideoStream} and {@link AudioStream#setAudioQuality(net.majorkernelpanic.streaming.audio.AudioQuality)}
251 + * for a {@link AudioStream}.
252 + */
253 + public synchronized void configure() throws IllegalStateException, IOException {
254 + if (mStreaming) throw new IllegalStateException("Can't be called while streaming.");
255 + if (mPacketizer != null) {
256 + mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
257 + mPacketizer.getRtpSocket().setOutputStream(mOutputStream, mChannelIdentifier);
258 + }
259 + mMode = mRequestedMode;
260 + mConfigured = true;
261 + }
262 +
263 + /** Starts the stream. */
264 + public synchronized void start() throws IllegalStateException, IOException {
265 +
266 + if (mDestination==null)
267 + throw new IllegalStateException("No destination ip address set for the stream !");
268 +
269 + if (mRtpPort<=0 || mRtcpPort<=0)
270 + throw new IllegalStateException("No destination ports set for the stream !");
271 +
272 + mPacketizer.setTimeToLive(mTTL);
273 +
274 + if (mMode != MODE_MEDIARECORDER_API) {
275 + encodeWithMediaCodec();
276 + } else {
277 + encodeWithMediaRecorder();
278 + }
279 +
280 + }
281 +
282 + /** Stops the stream. */
283 + @SuppressLint("NewApi")
284 + public synchronized void stop() {
285 + if (mStreaming) {
286 + try {
287 + if (mMode==MODE_MEDIARECORDER_API) {
288 + mMediaRecorder.stop();
289 + mMediaRecorder.release();
290 + mMediaRecorder = null;
291 + closeSockets();
292 + mPacketizer.stop();
293 + } else {
294 + mPacketizer.stop();
295 + mMediaCodec.stop();
296 + mMediaCodec.release();
297 + mMediaCodec = null;
298 + }
299 + } catch (Exception e) {
300 + e.printStackTrace();
301 + }
302 + mStreaming = false;
303 + }
304 + }
305 +
306 + protected abstract void encodeWithMediaRecorder() throws IOException;
307 +
308 + protected abstract void encodeWithMediaCodec() throws IOException;
309 +
310 + /**
311 + * Returns a description of the stream using SDP.
312 + * This method can only be called after {@link Stream#configure()}.
313 + * @throws IllegalStateException Thrown when {@link Stream#configure()} was not called.
314 + */
315 + public abstract String getSessionDescription();
316 +
317 + /**
318 + * Returns the SSRC of the underlying {@link net.majorkernelpanic.streaming.rtp.RtpSocket}.
319 + * @return the SSRC of the stream
320 + */
321 + public int getSSRC() {
322 + return getPacketizer().getSSRC();
323 + }
324 +
325 + protected void createSockets() throws IOException {
326 +
327 + if (sPipeApi == PIPE_API_LS) {
328 +
329 + final String LOCAL_ADDR = "net.majorkernelpanic.streaming-";
330 +
331 + for (int i=0;i<10;i++) {
332 + try {
333 + mSocketId = new Random().nextInt();
334 + mLss = new LocalServerSocket(LOCAL_ADDR+mSocketId);
335 + break;
336 + } catch (IOException e1) {}
337 + }
338 +
339 + mReceiver = new LocalSocket();
340 + mReceiver.connect( new LocalSocketAddress(LOCAL_ADDR+mSocketId));
341 + mReceiver.setReceiveBufferSize(500000);
342 + mReceiver.setSoTimeout(3000);
343 + mSender = mLss.accept();
344 + mSender.setSendBufferSize(500000);
345 +
346 + } else {
347 + Log.e(TAG, "parcelFileDescriptors createPipe version = Lollipop");
348 + mParcelFileDescriptors = ParcelFileDescriptor.createPipe();
349 + mParcelRead = new ParcelFileDescriptor(mParcelFileDescriptors[0]);
350 + mParcelWrite = new ParcelFileDescriptor(mParcelFileDescriptors[1]);
351 + }
352 + }
353 +
354 + protected void closeSockets() {
355 + if (sPipeApi == PIPE_API_LS) {
356 + try {
357 + mReceiver.close();
358 + } catch (Exception e) {
359 + e.printStackTrace();
360 + }
361 + try {
362 + mSender.close();
363 + } catch (Exception e) {
364 + e.printStackTrace();
365 + }
366 + try {
367 + mLss.close();
368 + } catch (Exception e) {
369 + e.printStackTrace();
370 + }
371 + mLss = null;
372 + mSender = null;
373 + mReceiver = null;
374 +
375 + } else {
376 + try {
377 + if (mParcelRead != null) {
378 + mParcelRead.close();
379 + }
380 + } catch (Exception e) {
381 + e.printStackTrace();
382 + }
383 + try {
384 + if (mParcelWrite != null) {
385 + mParcelWrite.close();
386 + }
387 + } catch (Exception e) {
388 + e.printStackTrace();
389 + }
390 + }
391 + }
392 +
393 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming;
20 +
21 +import java.io.IOException;
22 +import java.net.InetAddress;
23 +import java.net.UnknownHostException;
24 +import java.util.concurrent.CountDownLatch;
25 +import net.majorkernelpanic.streaming.audio.AudioQuality;
26 +import net.majorkernelpanic.streaming.audio.AudioStream;
27 +import net.majorkernelpanic.streaming.exceptions.CameraInUseException;
28 +import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException;
29 +import net.majorkernelpanic.streaming.exceptions.InvalidSurfaceException;
30 +import net.majorkernelpanic.streaming.exceptions.StorageUnavailableException;
31 +import net.majorkernelpanic.streaming.gl.SurfaceView;
32 +import net.majorkernelpanic.streaming.rtsp.RtspClient;
33 +import net.majorkernelpanic.streaming.video.VideoQuality;
34 +import net.majorkernelpanic.streaming.video.VideoStream;
35 +import android.hardware.Camera.CameraInfo;
36 +import android.os.Handler;
37 +import android.os.HandlerThread;
38 +import android.os.Looper;
39 +
40 +/**
41 + * You should instantiate this class with the {@link SessionBuilder}.<br />
42 + * This is the class you will want to use to stream audio and or video to some peer using RTP.<br />
43 + *
44 + * It holds a {@link VideoStream} and a {@link AudioStream} together and provides
45 + * synchronous and asynchronous functions to start and stop those steams.
46 + * You should implement a callback interface {@link Callback} to receive notifications and error reports.<br />
47 + *
48 + * If you want to stream to a RTSP server, you will need an instance of this class and hand it to a {@link RtspClient}.
49 + *
50 + * If you don't use the RTSP protocol, you will still need to send a session description to the receiver
51 + * for him to be able to decode your audio/video streams. You can obtain this session description by calling
52 + * {@link #configure()} or {@link #syncConfigure()} to configure the session with its parameters
53 + * (audio samplingrate, video resolution) and then {@link Session#getSessionDescription()}.<br />
54 + *
55 + * See the example 2 here: https://github.com/fyhertz/libstreaming-examples to
56 + * see an example of how to get a SDP.<br />
57 + *
58 + * See the example 3 here: https://github.com/fyhertz/libstreaming-examples to
59 + * see an example of how to stream to a RTSP server.<br />
60 + *
61 + */
62 +public class Session {
63 +
64 + public final static String TAG = "Session";
65 +
66 + public final static int STREAM_VIDEO = 0x01;
67 +
68 + public final static int STREAM_AUDIO = 0x00;
69 +
70 + /** Some app is already using a camera (Camera.open() has failed). */
71 + public final static int ERROR_CAMERA_ALREADY_IN_USE = 0x00;
72 +
73 + /** The phone may not support some streaming parameters that you are trying to use (bit rate, frame rate, resolution...). */
74 + public final static int ERROR_CONFIGURATION_NOT_SUPPORTED = 0x01;
75 +
76 + /**
77 + * The internal storage of the phone is not ready.
78 + * libstreaming tried to store a test file on the sdcard but couldn't.
79 + * See H264Stream and AACStream to find out why libstreaming would want to something like that.
80 + */
81 + public final static int ERROR_STORAGE_NOT_READY = 0x02;
82 +
83 + /** The phone has no flash. */
84 + public final static int ERROR_CAMERA_HAS_NO_FLASH = 0x03;
85 +
86 + /** The supplied SurfaceView is not a valid surface, or has not been created yet. */
87 + public final static int ERROR_INVALID_SURFACE = 0x04;
88 +
89 + /**
90 + * The destination set with {@link Session#setDestination(String)} could not be resolved.
91 + * May mean that the phone has no access to the internet, or that the DNS server could not
92 + * resolved the host name.
93 + */
94 + public final static int ERROR_UNKNOWN_HOST = 0x05;
95 +
96 + /**
97 + * Some other error occurred !
98 + */
99 + public final static int ERROR_OTHER = 0x06;
100 +
101 + private String mOrigin;
102 + private String mDestination;
103 + private int mTimeToLive = 64;
104 + private long mTimestamp;
105 +
106 + private AudioStream mAudioStream = null;
107 + private VideoStream mVideoStream = null;
108 +
109 + private Callback mCallback;
110 + private Handler mMainHandler;
111 +
112 + private Handler mHandler;
113 +
114 + /**
115 + * Creates a streaming session that can be customized by adding tracks.
116 + */
117 + public Session() {
118 + long uptime = System.currentTimeMillis();
119 +
120 + HandlerThread thread = new HandlerThread("net.majorkernelpanic.streaming.Session");
121 + thread.start();
122 +
123 + mHandler = new Handler(thread.getLooper());
124 + mMainHandler = new Handler(Looper.getMainLooper());
125 + mTimestamp = (uptime/1000)<<32 & (((uptime-((uptime/1000)*1000))>>32)/1000); // NTP timestamp
126 + mOrigin = "127.0.0.1";
127 + }
128 +
129 + /**
130 + * The callback interface you need to implement to get some feedback
131 + * Those will be called from the UI thread.
132 + */
133 + public interface Callback {
134 +
135 + /**
136 + * Called periodically to inform you on the bandwidth
137 + * consumption of the streams when streaming.
138 + */
139 + public void onBitrateUpdate(long bitrate);
140 +
141 + /** Called when some error occurs. */
142 + public void onSessionError(int reason, int streamType, Exception e);
143 +
144 + /**
145 + * Called when the previw of the {@link VideoStream}
146 + * has correctly been started.
147 + * If an error occurs while starting the preview,
148 + * {@link Callback#onSessionError(int, int, Exception)} will be
149 + * called instead of {@link Callback#onPreviewStarted()}.
150 + */
151 + public void onPreviewStarted();
152 +
153 + /**
154 + * Called when the session has correctly been configured
155 + * after calling {@link Session#configure()}.
156 + * If an error occurs while configuring the {@link Session},
157 + * {@link Callback#onSessionError(int, int, Exception)} will be
158 + * called instead of {@link Callback#onSessionConfigured()}.
159 + */
160 + public void onSessionConfigured();
161 +
162 + /**
163 + * Called when the streams of the session have correctly been started.
164 + * If an error occurs while starting the {@link Session},
165 + * {@link Callback#onSessionError(int, int, Exception)} will be
166 + * called instead of {@link Callback#onSessionStarted()}.
167 + */
168 + public void onSessionStarted();
169 +
170 + /** Called when the stream of the session have been stopped. */
171 + public void onSessionStopped();
172 +
173 + }
174 +
175 + /** You probably don't need to use that directly, use the {@link SessionBuilder}. */
176 + void addAudioTrack(AudioStream track) {
177 + removeAudioTrack();
178 + mAudioStream = track;
179 + }
180 +
181 + /** You probably don't need to use that directly, use the {@link SessionBuilder}. */
182 + void addVideoTrack(VideoStream track) {
183 + removeVideoTrack();
184 + mVideoStream = track;
185 + }
186 +
187 + /** You probably don't need to use that directly, use the {@link SessionBuilder}. */
188 + void removeAudioTrack() {
189 + if (mAudioStream != null) {
190 + mAudioStream.stop();
191 + mAudioStream = null;
192 + }
193 + }
194 +
195 + /** You probably don't need to use that directly, use the {@link SessionBuilder}. */
196 + void removeVideoTrack() {
197 + if (mVideoStream != null) {
198 + mVideoStream.stopPreview();
199 + mVideoStream = null;
200 + }
201 + }
202 +
203 + /** Returns the underlying {@link AudioStream} used by the {@link Session}. */
204 + public AudioStream getAudioTrack() {
205 + return mAudioStream;
206 + }
207 +
208 + /** Returns the underlying {@link VideoStream} used by the {@link Session}. */
209 + public VideoStream getVideoTrack() {
210 + return mVideoStream;
211 + }
212 +
213 + /**
214 + * Sets the callback interface that will be called by the {@link Session}.
215 + * @param callback The implementation of the {@link Callback} interface
216 + */
217 + public void setCallback(Callback callback) {
218 + mCallback = callback;
219 + }
220 +
221 + /**
222 + * The origin address of the session.
223 + * It appears in the session description.
224 + * @param origin The origin address
225 + */
226 + public void setOrigin(String origin) {
227 + mOrigin = origin;
228 + }
229 +
230 + /**
231 + * The destination address for all the streams of the session. <br />
232 + * Changes will be taken into account the next time you start the session.
233 + * @param destination The destination address
234 + */
235 + public void setDestination(String destination) {
236 + mDestination = destination;
237 + }
238 +
239 + /**
240 + * Set the TTL of all packets sent during the session. <br />
241 + * Changes will be taken into account the next time you start the session.
242 + * @param ttl The Time To Live
243 + */
244 + public void setTimeToLive(int ttl) {
245 + mTimeToLive = ttl;
246 + }
247 +
248 + /**
249 + * Sets the configuration of the stream. <br />
250 + * You can call this method at any time and changes will take
251 + * effect next time you call {@link #configure()}.
252 + * @param quality Quality of the stream
253 + */
254 + public void setVideoQuality(VideoQuality quality) {
255 + if (mVideoStream != null) {
256 + mVideoStream.setVideoQuality(quality);
257 + }
258 + }
259 +
260 + /**
261 + * Sets a Surface to show a preview of recorded media (video). <br />
262 + * You can call this method at any time and changes will take
263 + * effect next time you call {@link #start()} or {@link #startPreview()}.
264 + */
265 + public void setSurfaceView(final SurfaceView view) {
266 + mHandler.post(new Runnable() {
267 + @Override
268 + public void run() {
269 + if (mVideoStream != null) {
270 + mVideoStream.setSurfaceView(view);
271 + }
272 + }
273 + });
274 + }
275 +
276 + /**
277 + * Sets the orientation of the preview. <br />
278 + * You can call this method at any time and changes will take
279 + * effect next time you call {@link #configure()}.
280 + * @param orientation The orientation of the preview
281 + */
282 + public void setPreviewOrientation(int orientation) {
283 + if (mVideoStream != null) {
284 + mVideoStream.setPreviewOrientation(orientation);
285 + }
286 + }
287 +
288 + /**
289 + * Sets the configuration of the stream. <br />
290 + * You can call this method at any time and changes will take
291 + * effect next time you call {@link #configure()}.
292 + * @param quality Quality of the stream
293 + */
294 + public void setAudioQuality(AudioQuality quality) {
295 + if (mAudioStream != null) {
296 + mAudioStream.setAudioQuality(quality);
297 + }
298 + }
299 +
300 + /**
301 + * Returns the {@link Callback} interface that was set with
302 + * {@link #setCallback(Callback)} or null if none was set.
303 + */
304 + public Callback getCallback() {
305 + return mCallback;
306 + }
307 +
308 + /**
309 + * Returns a Session Description that can be stored in a file or sent to a client with RTSP.
310 + * @return The Session Description.
311 + * @throws IllegalStateException Thrown when {@link #setDestination(String)} has never been called.
312 + */
313 + public String getSessionDescription() {
314 + StringBuilder sessionDescription = new StringBuilder();
315 + if (mDestination==null) {
316 + throw new IllegalStateException("setDestination() has not been called !");
317 + }
318 + sessionDescription.append("v=0\r\n");
319 + // TODO: Add IPV6 support
320 + sessionDescription.append("o=- "+mTimestamp+" "+mTimestamp+" IN IP4 "+mOrigin+"\r\n");
321 + sessionDescription.append("s=Unnamed\r\n");
322 + sessionDescription.append("i=N/A\r\n");
323 + sessionDescription.append("c=IN IP4 "+mDestination+"\r\n");
324 + // t=0 0 means the session is permanent (we don't know when it will stop)
325 + sessionDescription.append("t=0 0\r\n");
326 + sessionDescription.append("a=recvonly\r\n");
327 + // Prevents two different sessions from using the same peripheral at the same time
328 + if (mAudioStream != null) {
329 + sessionDescription.append(mAudioStream.getSessionDescription());
330 + sessionDescription.append("a=control:trackID="+0+"\r\n");
331 + }
332 + if (mVideoStream != null) {
333 + sessionDescription.append(mVideoStream.getSessionDescription());
334 + sessionDescription.append("a=control:trackID="+1+"\r\n");
335 + }
336 + return sessionDescription.toString();
337 + }
338 +
339 + /** Returns the destination set with {@link #setDestination(String)}. */
340 + public String getDestination() {
341 + return mDestination;
342 + }
343 +
344 + /** Returns an approximation of the bandwidth consumed by the session in bit per second. */
345 + public long getBitrate() {
346 + long sum = 0;
347 + if (mAudioStream != null) sum += mAudioStream.getBitrate();
348 + if (mVideoStream != null) sum += mVideoStream.getBitrate();
349 + return sum;
350 + }
351 +
352 + /** Indicates if a track is currently running. */
353 + public boolean isStreaming() {
354 + return (mAudioStream!=null && mAudioStream.isStreaming()) || (mVideoStream!=null && mVideoStream.isStreaming());
355 + }
356 +
357 + /**
358 + * Configures all streams of the session.
359 + **/
360 + public void configure() {
361 + mHandler.post(new Runnable() {
362 + @Override
363 + public void run() {
364 + try {
365 + syncConfigure();
366 + } catch (Exception e) {};
367 + }
368 + });
369 + }
370 +
371 + /**
372 + * Does the same thing as {@link #configure()}, but in a synchronous manner. <br />
373 + * Throws exceptions in addition to calling a callback
374 + * {@link Callback#onSessionError(int, int, Exception)} when
375 + * an error occurs.
376 + **/
377 + public void syncConfigure()
378 + throws CameraInUseException,
379 + StorageUnavailableException,
380 + ConfNotSupportedException,
381 + InvalidSurfaceException,
382 + RuntimeException,
383 + IOException {
384 +
385 + for (int id=0;id<2;id++) {
386 + Stream stream = id==0 ? mAudioStream : mVideoStream;
387 + if (stream!=null && !stream.isStreaming()) {
388 + try {
389 + stream.configure();
390 + } catch (CameraInUseException e) {
391 + postError(ERROR_CAMERA_ALREADY_IN_USE , id, e);
392 + throw e;
393 + } catch (StorageUnavailableException e) {
394 + postError(ERROR_STORAGE_NOT_READY , id, e);
395 + throw e;
396 + } catch (ConfNotSupportedException e) {
397 + postError(ERROR_CONFIGURATION_NOT_SUPPORTED , id, e);
398 + throw e;
399 + } catch (InvalidSurfaceException e) {
400 + postError(ERROR_INVALID_SURFACE , id, e);
401 + throw e;
402 + } catch (IOException e) {
403 + postError(ERROR_OTHER, id, e);
404 + throw e;
405 + } catch (RuntimeException e) {
406 + postError(ERROR_OTHER, id, e);
407 + throw e;
408 + }
409 + }
410 + }
411 + postSessionConfigured();
412 + }
413 +
414 + /**
415 + * Asynchronously starts all streams of the session.
416 + **/
417 + public void start() {
418 + mHandler.post(new Runnable() {
419 + @Override
420 + public void run() {
421 + try {
422 + syncStart();
423 + } catch (Exception e) {}
424 + }
425 + });
426 + }
427 +
428 + /**
429 + * Starts a stream in a synchronous manner. <br />
430 + * Throws exceptions in addition to calling a callback.
431 + * @param id The id of the stream to start
432 + **/
433 + public void syncStart(int id)
434 + throws CameraInUseException,
435 + StorageUnavailableException,
436 + ConfNotSupportedException,
437 + InvalidSurfaceException,
438 + UnknownHostException,
439 + IOException {
440 +
441 + Stream stream = id==0 ? mAudioStream : mVideoStream;
442 + if (stream!=null && !stream.isStreaming()) {
443 + try {
444 + InetAddress destination = InetAddress.getByName(mDestination);
445 + stream.setTimeToLive(mTimeToLive);
446 + stream.setDestinationAddress(destination);
447 + stream.start();
448 + if (getTrack(1-id) == null || getTrack(1-id).isStreaming()) {
449 + postSessionStarted();
450 + }
451 + if (getTrack(1-id) == null || !getTrack(1-id).isStreaming()) {
452 + mHandler.post(mUpdateBitrate);
453 + }
454 + } catch (UnknownHostException e) {
455 + postError(ERROR_UNKNOWN_HOST, id, e);
456 + throw e;
457 + } catch (CameraInUseException e) {
458 + postError(ERROR_CAMERA_ALREADY_IN_USE , id, e);
459 + throw e;
460 + } catch (StorageUnavailableException e) {
461 + postError(ERROR_STORAGE_NOT_READY , id, e);
462 + throw e;
463 + } catch (ConfNotSupportedException e) {
464 + postError(ERROR_CONFIGURATION_NOT_SUPPORTED , id, e);
465 + throw e;
466 + } catch (InvalidSurfaceException e) {
467 + postError(ERROR_INVALID_SURFACE , id, e);
468 + throw e;
469 + } catch (IOException e) {
470 + postError(ERROR_OTHER, id, e);
471 + throw e;
472 + } catch (RuntimeException e) {
473 + postError(ERROR_OTHER, id, e);
474 + throw e;
475 + }
476 + }
477 +
478 + }
479 +
480 + /**
481 + * Does the same thing as {@link #start()}, but in a synchronous manner. <br />
482 + * Throws exceptions in addition to calling a callback.
483 + **/
484 + public void syncStart()
485 + throws CameraInUseException,
486 + StorageUnavailableException,
487 + ConfNotSupportedException,
488 + InvalidSurfaceException,
489 + UnknownHostException,
490 + IOException {
491 +
492 + syncStart(1);
493 + try {
494 + syncStart(0);
495 + } catch (RuntimeException e) {
496 + syncStop(1);
497 + throw e;
498 + } catch (IOException e) {
499 + syncStop(1);
500 + throw e;
501 + }
502 +
503 + }
504 +
505 + /** Stops all existing streams. */
506 + public void stop() {
507 + mHandler.post(new Runnable() {
508 + @Override
509 + public void run() {
510 + syncStop();
511 + }
512 + });
513 + }
514 +
515 + /**
516 + * Stops one stream in a synchronous manner.
517 + * @param id The id of the stream to stop
518 + **/
519 + private void syncStop(final int id) {
520 + Stream stream = id==0 ? mAudioStream : mVideoStream;
521 + if (stream!=null) {
522 + stream.stop();
523 + }
524 + }
525 +
526 + /** Stops all existing streams in a synchronous manner. */
527 + public void syncStop() {
528 + syncStop(0);
529 + syncStop(1);
530 + postSessionStopped();
531 + }
532 +
533 + /**
534 + * Asynchronously starts the camera preview. <br />
535 + * You should of course pass a {@link SurfaceView} to {@link #setSurfaceView(SurfaceView)}
536 + * before calling this method. Otherwise, the {@link Callback#onSessionError(int, int, Exception)}
537 + * callback will be called with {@link #ERROR_INVALID_SURFACE}.
538 + */
539 + public void startPreview() {
540 + mHandler.post(new Runnable() {
541 + @Override
542 + public void run() {
543 + if (mVideoStream != null) {
544 + try {
545 + mVideoStream.startPreview();
546 + postPreviewStarted();
547 + mVideoStream.configure();
548 + } catch (CameraInUseException e) {
549 + postError(ERROR_CAMERA_ALREADY_IN_USE , STREAM_VIDEO, e);
550 + } catch (ConfNotSupportedException e) {
551 + postError(ERROR_CONFIGURATION_NOT_SUPPORTED , STREAM_VIDEO, e);
552 + } catch (InvalidSurfaceException e) {
553 + postError(ERROR_INVALID_SURFACE , STREAM_VIDEO, e);
554 + } catch (RuntimeException e) {
555 + postError(ERROR_OTHER, STREAM_VIDEO, e);
556 + } catch (StorageUnavailableException e) {
557 + postError(ERROR_STORAGE_NOT_READY, STREAM_VIDEO, e);
558 + } catch (IOException e) {
559 + postError(ERROR_OTHER, STREAM_VIDEO, e);
560 + }
561 + }
562 + }
563 + });
564 + }
565 +
566 + /**
567 + * Asynchronously stops the camera preview.
568 + */
569 + public void stopPreview() {
570 + mHandler.post(new Runnable() {
571 + @Override
572 + public void run() {
573 + if (mVideoStream != null) {
574 + mVideoStream.stopPreview();
575 + }
576 + }
577 + });
578 + }
579 +
580 + /** Switch between the front facing and the back facing camera of the phone. <br />
581 + * If {@link #startPreview()} has been called, the preview will be briefly interrupted. <br />
582 + * If {@link #start()} has been called, the stream will be briefly interrupted.<br />
583 + * To find out which camera is currently selected, use {@link #getCamera()}
584 + **/
585 + public void switchCamera() {
586 + mHandler.post(new Runnable() {
587 + @Override
588 + public void run() {
589 + if (mVideoStream != null) {
590 + try {
591 + mVideoStream.switchCamera();
592 + postPreviewStarted();
593 + } catch (CameraInUseException e) {
594 + postError(ERROR_CAMERA_ALREADY_IN_USE , STREAM_VIDEO, e);
595 + } catch (ConfNotSupportedException e) {
596 + postError(ERROR_CONFIGURATION_NOT_SUPPORTED , STREAM_VIDEO, e);
597 + } catch (InvalidSurfaceException e) {
598 + postError(ERROR_INVALID_SURFACE , STREAM_VIDEO, e);
599 + } catch (IOException e) {
600 + postError(ERROR_OTHER, STREAM_VIDEO, e);
601 + } catch (RuntimeException e) {
602 + postError(ERROR_OTHER, STREAM_VIDEO, e);
603 + }
604 + }
605 + }
606 + });
607 + }
608 +
609 + /**
610 + * Returns the id of the camera currently selected. <br />
611 + * It can be either {@link CameraInfo#CAMERA_FACING_BACK} or
612 + * {@link CameraInfo#CAMERA_FACING_FRONT}.
613 + */
614 + public int getCamera() {
615 + return mVideoStream != null ? mVideoStream.getCamera() : 0;
616 +
617 + }
618 +
619 + /**
620 + * Toggles the LED of the phone if it has one.
621 + * You can get the current state of the flash with
622 + * {@link Session#getVideoTrack()} and {@link VideoStream#getFlashState()}.
623 + **/
624 + public void toggleFlash() {
625 + mHandler.post(new Runnable() {
626 + @Override
627 + public void run() {
628 + if (mVideoStream != null) {
629 + try {
630 + mVideoStream.toggleFlash();
631 + } catch (RuntimeException e) {
632 + postError(ERROR_CAMERA_HAS_NO_FLASH, STREAM_VIDEO, e);
633 + }
634 + }
635 + }
636 + });
637 + }
638 +
639 + /** Deletes all existing tracks & release associated resources. */
640 + public void release() {
641 + removeAudioTrack();
642 + removeVideoTrack();
643 + mHandler.getLooper().quit();
644 + }
645 +
646 + private void postPreviewStarted() {
647 + mMainHandler.post(new Runnable() {
648 + @Override
649 + public void run() {
650 + if (mCallback != null) {
651 + mCallback.onPreviewStarted();
652 + }
653 + }
654 + });
655 + }
656 +
657 + private void postSessionConfigured() {
658 + mMainHandler.post(new Runnable() {
659 + @Override
660 + public void run() {
661 + if (mCallback != null) {
662 + mCallback.onSessionConfigured();
663 + }
664 + }
665 + });
666 + }
667 +
668 + private void postSessionStarted() {
669 + mMainHandler.post(new Runnable() {
670 + @Override
671 + public void run() {
672 + if (mCallback != null) {
673 + mCallback.onSessionStarted();
674 + }
675 + }
676 + });
677 + }
678 +
679 + private void postSessionStopped() {
680 + mMainHandler.post(new Runnable() {
681 + @Override
682 + public void run() {
683 + if (mCallback != null) {
684 + mCallback.onSessionStopped();
685 + }
686 + }
687 + });
688 + }
689 +
690 + private void postError(final int reason, final int streamType,final Exception e) {
691 + mMainHandler.post(new Runnable() {
692 + @Override
693 + public void run() {
694 + if (mCallback != null) {
695 + mCallback.onSessionError(reason, streamType, e);
696 + }
697 + }
698 + });
699 + }
700 +
701 + private void postBitRate(final long bitrate) {
702 + mMainHandler.post(new Runnable() {
703 + @Override
704 + public void run() {
705 + if (mCallback != null) {
706 + mCallback.onBitrateUpdate(bitrate);
707 + }
708 + }
709 + });
710 + }
711 +
712 + private Runnable mUpdateBitrate = new Runnable() {
713 + @Override
714 + public void run() {
715 + if (isStreaming()) {
716 + postBitRate(getBitrate());
717 + mHandler.postDelayed(mUpdateBitrate, 500);
718 + } else {
719 + postBitRate(0);
720 + }
721 + }
722 + };
723 +
724 +
725 + public boolean trackExists(int id) {
726 + if (id==0)
727 + return mAudioStream!=null;
728 + else
729 + return mVideoStream!=null;
730 + }
731 +
732 + public Stream getTrack(int id) {
733 + if (id==0)
734 + return mAudioStream;
735 + else
736 + return mVideoStream;
737 + }
738 +
739 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming;
20 +
21 +import java.io.IOException;
22 +import java.net.InetAddress;
23 +import net.majorkernelpanic.streaming.audio.AACStream;
24 +import net.majorkernelpanic.streaming.audio.AMRNBStream;
25 +import net.majorkernelpanic.streaming.audio.AudioQuality;
26 +import net.majorkernelpanic.streaming.audio.AudioStream;
27 +import net.majorkernelpanic.streaming.gl.SurfaceView;
28 +import net.majorkernelpanic.streaming.video.H263Stream;
29 +import net.majorkernelpanic.streaming.video.H264Stream;
30 +import net.majorkernelpanic.streaming.video.VideoQuality;
31 +import net.majorkernelpanic.streaming.video.VideoStream;
32 +import android.content.Context;
33 +import android.hardware.Camera.CameraInfo;
34 +import android.preference.PreferenceManager;
35 +
36 +/**
37 + * Call {@link #getInstance()} to get access to the SessionBuilder.
38 + */
39 +public class SessionBuilder {
40 +
41 + public final static String TAG = "SessionBuilder";
42 +
43 + /** Can be used with {@link #setVideoEncoder}. */
44 + public final static int VIDEO_NONE = 0;
45 +
46 + /** Can be used with {@link #setVideoEncoder}. */
47 + public final static int VIDEO_H264 = 1;
48 +
49 + /** Can be used with {@link #setVideoEncoder}. */
50 + public final static int VIDEO_H263 = 2;
51 +
52 + /** Can be used with {@link #setAudioEncoder}. */
53 + public final static int AUDIO_NONE = 0;
54 +
55 + /** Can be used with {@link #setAudioEncoder}. */
56 + public final static int AUDIO_AMRNB = 3;
57 +
58 + /** Can be used with {@link #setAudioEncoder}. */
59 + public final static int AUDIO_AAC = 5;
60 +
61 + // Default configuration
62 + private VideoQuality mVideoQuality = VideoQuality.DEFAULT_VIDEO_QUALITY;
63 + private AudioQuality mAudioQuality = AudioQuality.DEFAULT_AUDIO_QUALITY;
64 + private Context mContext;
65 + private int mVideoEncoder = VIDEO_H263;
66 + private int mAudioEncoder = AUDIO_AMRNB;
67 + private int mCamera = CameraInfo.CAMERA_FACING_BACK;
68 + private int mTimeToLive = 64;
69 + private int mOrientation = 0;
70 + private boolean mFlash = false;
71 + private SurfaceView mSurfaceView = null;
72 + private String mOrigin = null;
73 + private String mDestination = null;
74 + private Session.Callback mCallback = null;
75 +
76 + // Removes the default public constructor
77 + private SessionBuilder() {}
78 +
79 + // The SessionManager implements the singleton pattern
80 + private static volatile SessionBuilder sInstance = null;
81 +
82 + /**
83 + * Returns a reference to the {@link SessionBuilder}.
84 + * @return The reference to the {@link SessionBuilder}
85 + */
86 + public final static SessionBuilder getInstance() {
87 + if (sInstance == null) {
88 + synchronized (SessionBuilder.class) {
89 + if (sInstance == null) {
90 + SessionBuilder.sInstance = new SessionBuilder();
91 + }
92 + }
93 + }
94 + return sInstance;
95 + }
96 +
97 + /**
98 + * Creates a new {@link Session}.
99 + * @return The new Session
100 + * @throws IOException
101 + */
102 + public Session build() {
103 + Session session;
104 +
105 + session = new Session();
106 + session.setOrigin(mOrigin);
107 + session.setDestination(mDestination);
108 + session.setTimeToLive(mTimeToLive);
109 + session.setCallback(mCallback);
110 +
111 + switch (mAudioEncoder) {
112 + case AUDIO_AAC:
113 + AACStream stream = new AACStream();
114 + session.addAudioTrack(stream);
115 + if (mContext!=null)
116 + stream.setPreferences(PreferenceManager.getDefaultSharedPreferences(mContext));
117 + break;
118 + case AUDIO_AMRNB:
119 + session.addAudioTrack(new AMRNBStream());
120 + break;
121 + }
122 +
123 + switch (mVideoEncoder) {
124 + case VIDEO_H263:
125 + session.addVideoTrack(new H263Stream(mCamera));
126 + break;
127 + case VIDEO_H264:
128 + H264Stream stream = new H264Stream(mCamera);
129 + if (mContext!=null)
130 + stream.setPreferences(PreferenceManager.getDefaultSharedPreferences(mContext));
131 + session.addVideoTrack(stream);
132 + break;
133 + }
134 +
135 + if (session.getVideoTrack()!=null) {
136 + VideoStream video = session.getVideoTrack();
137 + video.setFlashState(mFlash);
138 + video.setVideoQuality(mVideoQuality);
139 + video.setSurfaceView(mSurfaceView);
140 + video.setPreviewOrientation(mOrientation);
141 + video.setDestinationPorts(5006);
142 + }
143 +
144 + if (session.getAudioTrack()!=null) {
145 + AudioStream audio = session.getAudioTrack();
146 + audio.setAudioQuality(mAudioQuality);
147 + audio.setDestinationPorts(5004);
148 + }
149 +
150 + return session;
151 +
152 + }
153 +
154 + /**
155 + * Access to the context is needed for the H264Stream class to store some stuff in the SharedPreferences.
156 + * Note that you should pass the Application context, not the context of an Activity.
157 + **/
158 + public SessionBuilder setContext(Context context) {
159 + mContext = context;
160 + return this;
161 + }
162 +
163 + /** Sets the destination of the session. */
164 + public SessionBuilder setDestination(String destination) {
165 + mDestination = destination;
166 + return this;
167 + }
168 +
169 + /** Sets the origin of the session. It appears in the SDP of the session. */
170 + public SessionBuilder setOrigin(String origin) {
171 + mOrigin = origin;
172 + return this;
173 + }
174 +
175 + /** Sets the video stream quality. */
176 + public SessionBuilder setVideoQuality(VideoQuality quality) {
177 + mVideoQuality = quality.clone();
178 + return this;
179 + }
180 +
181 + /** Sets the audio encoder. */
182 + public SessionBuilder setAudioEncoder(int encoder) {
183 + mAudioEncoder = encoder;
184 + return this;
185 + }
186 +
187 + /** Sets the audio quality. */
188 + public SessionBuilder setAudioQuality(AudioQuality quality) {
189 + mAudioQuality = quality.clone();
190 + return this;
191 + }
192 +
193 + /** Sets the default video encoder. */
194 + public SessionBuilder setVideoEncoder(int encoder) {
195 + mVideoEncoder = encoder;
196 + return this;
197 + }
198 +
199 + public SessionBuilder setFlashEnabled(boolean enabled) {
200 + mFlash = enabled;
201 + return this;
202 + }
203 +
204 + public SessionBuilder setCamera(int camera) {
205 + mCamera = camera;
206 + return this;
207 + }
208 +
209 + public SessionBuilder setTimeToLive(int ttl) {
210 + mTimeToLive = ttl;
211 + return this;
212 + }
213 +
214 + /**
215 + * Sets the SurfaceView required to preview the video stream.
216 + **/
217 + public SessionBuilder setSurfaceView(SurfaceView surfaceView) {
218 + mSurfaceView = surfaceView;
219 + return this;
220 + }
221 +
222 + /**
223 + * Sets the orientation of the preview.
224 + * @param orientation The orientation of the preview
225 + */
226 + public SessionBuilder setPreviewOrientation(int orientation) {
227 + mOrientation = orientation;
228 + return this;
229 + }
230 +
231 + public SessionBuilder setCallback(Session.Callback callback) {
232 + mCallback = callback;
233 + return this;
234 + }
235 +
236 + /** Returns the context set with {@link #setContext(Context)}*/
237 + public Context getContext() {
238 + return mContext;
239 + }
240 +
241 + /** Returns the destination ip address set with {@link #setDestination(String)}. */
242 + public String getDestination() {
243 + return mDestination;
244 + }
245 +
246 + /** Returns the origin ip address set with {@link #setOrigin(String)}. */
247 + public String getOrigin() {
248 + return mOrigin;
249 + }
250 +
251 + /** Returns the audio encoder set with {@link #setAudioEncoder(int)}. */
252 + public int getAudioEncoder() {
253 + return mAudioEncoder;
254 + }
255 +
256 + /** Returns the id of the {@link android.hardware.Camera} set with {@link #setCamera(int)}. */
257 + public int getCamera() {
258 + return mCamera;
259 + }
260 +
261 + /** Returns the video encoder set with {@link #setVideoEncoder(int)}. */
262 + public int getVideoEncoder() {
263 + return mVideoEncoder;
264 + }
265 +
266 + /** Returns the VideoQuality set with {@link #setVideoQuality(VideoQuality)}. */
267 + public VideoQuality getVideoQuality() {
268 + return mVideoQuality;
269 + }
270 +
271 + /** Returns the AudioQuality set with {@link #setAudioQuality(AudioQuality)}. */
272 + public AudioQuality getAudioQuality() {
273 + return mAudioQuality;
274 + }
275 +
276 + /** Returns the flash state set with {@link #setFlashEnabled(boolean)}. */
277 + public boolean getFlashState() {
278 + return mFlash;
279 + }
280 +
281 + /** Returns the SurfaceView set with {@link #setSurfaceView(SurfaceView)}. */
282 + public SurfaceView getSurfaceView() {
283 + return mSurfaceView;
284 + }
285 +
286 +
287 + /** Returns the time to live set with {@link #setTimeToLive(int)}. */
288 + public int getTimeToLive() {
289 + return mTimeToLive;
290 + }
291 +
292 + /** Returns a new {@link SessionBuilder} with the same configuration. */
293 + public SessionBuilder clone() {
294 + return new SessionBuilder()
295 + .setDestination(mDestination)
296 + .setOrigin(mOrigin)
297 + .setSurfaceView(mSurfaceView)
298 + .setPreviewOrientation(mOrientation)
299 + .setVideoQuality(mVideoQuality)
300 + .setVideoEncoder(mVideoEncoder)
301 + .setFlashEnabled(mFlash)
302 + .setCamera(mCamera)
303 + .setTimeToLive(mTimeToLive)
304 + .setAudioEncoder(mAudioEncoder)
305 + .setAudioQuality(mAudioQuality)
306 + .setContext(mContext)
307 + .setCallback(mCallback);
308 + }
309 +
310 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming;
20 +
21 +import java.io.IOException;
22 +import java.io.OutputStream;
23 +import java.net.InetAddress;
24 +
25 +/**
26 + * An interface that represents a Stream.
27 + */
28 +public interface Stream {
29 +
30 + /**
31 + * Configures the stream. You need to call this before calling {@link #getSessionDescription()}
32 + * to apply your configuration of the stream.
33 + */
34 + public void configure() throws IllegalStateException, IOException;
35 +
36 + /**
37 + * Starts the stream.
38 + * This method can only be called after {@link Stream#configure()}.
39 + */
40 + public void start() throws IllegalStateException, IOException;
41 +
42 + /**
43 + * Stops the stream.
44 + */
45 + public void stop();
46 +
47 + /**
48 + * Sets the Time To Live of packets sent over the network.
49 + * @param ttl The time to live
50 + * @throws IOException
51 + */
52 + public void setTimeToLive(int ttl) throws IOException;
53 +
54 + /**
55 + * Sets the destination ip address of the stream.
56 + * @param dest The destination address of the stream
57 + */
58 + public void setDestinationAddress(InetAddress dest);
59 +
60 + /**
61 + * Sets the destination ports of the stream.
62 + * If an odd number is supplied for the destination port then the next
63 + * lower even number will be used for RTP and it will be used for RTCP.
64 + * If an even number is supplied, it will be used for RTP and the next odd
65 + * number will be used for RTCP.
66 + * @param dport The destination port
67 + */
68 + public void setDestinationPorts(int dport);
69 +
70 + /**
71 + * Sets the destination ports of the stream.
72 + * @param rtpPort Destination port that will be used for RTP
73 + * @param rtcpPort Destination port that will be used for RTCP
74 + */
75 + public void setDestinationPorts(int rtpPort, int rtcpPort);
76 +
77 + /**
78 + * If a TCP is used as the transport protocol for the RTP session,
79 + * the output stream to which RTP packets will be written to must
80 + * be specified with this method.
81 + */
82 + public void setOutputStream(OutputStream stream, byte channelIdentifier);
83 +
84 + /**
85 + * Returns a pair of source ports, the first one is the
86 + * one used for RTP and the second one is used for RTCP.
87 + **/
88 + public int[] getLocalPorts();
89 +
90 + /**
91 + * Returns a pair of destination ports, the first one is the
92 + * one used for RTP and the second one is used for RTCP.
93 + **/
94 + public int[] getDestinationPorts();
95 +
96 +
97 + /**
98 + * Returns the SSRC of the underlying {@link net.majorkernelpanic.streaming.rtp.RtpSocket}.
99 + * @return the SSRC of the stream.
100 + */
101 + public int getSSRC();
102 +
103 + /**
104 + * Returns an approximation of the bit rate consumed by the stream in bit per seconde.
105 + */
106 + public long getBitrate();
107 +
108 + /**
109 + * Returns a description of the stream using SDP.
110 + * This method can only be called after {@link Stream#configure()}.
111 + * @throws IllegalStateException Thrown when {@link Stream#configure()} wa not called.
112 + */
113 + public String getSessionDescription() throws IllegalStateException;
114 +
115 + public boolean isStreaming();
116 +
117 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.audio;
20 +
21 +import java.io.File;
22 +import java.io.IOException;
23 +import java.io.RandomAccessFile;
24 +import java.lang.reflect.Field;
25 +import java.net.InetAddress;
26 +import java.nio.ByteBuffer;
27 +import net.majorkernelpanic.streaming.SessionBuilder;
28 +import net.majorkernelpanic.streaming.rtp.AACADTSPacketizer;
29 +import net.majorkernelpanic.streaming.rtp.AACLATMPacketizer;
30 +import net.majorkernelpanic.streaming.rtp.MediaCodecInputStream;
31 +import android.annotation.SuppressLint;
32 +import android.content.SharedPreferences;
33 +import android.content.SharedPreferences.Editor;
34 +import android.media.AudioFormat;
35 +import android.media.AudioRecord;
36 +import android.media.MediaCodec;
37 +import android.media.MediaCodecInfo;
38 +import android.media.MediaFormat;
39 +import android.media.MediaRecorder;
40 +import android.os.Build;
41 +import android.os.Environment;
42 +import android.service.textservice.SpellCheckerService.Session;
43 +import android.util.Log;
44 +
45 +/**
46 + * A class for streaming AAC from the camera of an android device using RTP.
47 + * You should use a {@link Session} instantiated with {@link SessionBuilder} instead of using this class directly.
48 + * Call {@link #setDestinationAddress(InetAddress)}, {@link #setDestinationPorts(int)} and {@link #setAudioQuality(AudioQuality)}
49 + * to configure the stream. You can then call {@link #start()} to start the RTP stream.
50 + * Call {@link #stop()} to stop the stream.
51 + */
52 +public class AACStream extends AudioStream {
53 +
54 + public final static String TAG = "AACStream";
55 +
56 + /** MPEG-4 Audio Object Types supported by ADTS. **/
57 + private static final String[] AUDIO_OBJECT_TYPES = {
58 + "NULL", // 0
59 + "AAC Main", // 1
60 + "AAC LC (Low Complexity)", // 2
61 + "AAC SSR (Scalable Sample Rate)", // 3
62 + "AAC LTP (Long Term Prediction)" // 4
63 + };
64 +
65 + /** There are 13 supported frequencies by ADTS. **/
66 + public static final int[] AUDIO_SAMPLING_RATES = {
67 + 96000, // 0
68 + 88200, // 1
69 + 64000, // 2
70 + 48000, // 3
71 + 44100, // 4
72 + 32000, // 5
73 + 24000, // 6
74 + 22050, // 7
75 + 16000, // 8
76 + 12000, // 9
77 + 11025, // 10
78 + 8000, // 11
79 + 7350, // 12
80 + -1, // 13
81 + -1, // 14
82 + -1, // 15
83 + };
84 +
85 + private String mSessionDescription = null;
86 + private int mProfile, mSamplingRateIndex, mChannel, mConfig;
87 + private SharedPreferences mSettings = null;
88 + private AudioRecord mAudioRecord = null;
89 + private Thread mThread = null;
90 +
91 + public AACStream() {
92 + super();
93 +
94 + if (!AACStreamingSupported()) {
95 + Log.e(TAG,"AAC not supported on this phone");
96 + throw new RuntimeException("AAC not supported by this phone !");
97 + } else {
98 + Log.d(TAG,"AAC supported on this phone");
99 + }
100 +
101 + }
102 +
103 + private static boolean AACStreamingSupported() {
104 + if (Build.VERSION.SDK_INT<14) return false;
105 + try {
106 + MediaRecorder.OutputFormat.class.getField("AAC_ADTS");
107 + return true;
108 + } catch (Exception e) {
109 + return false;
110 + }
111 + }
112 +
113 + /**
114 + * Some data (the actual sampling rate used by the phone and the AAC profile) needs to be stored once {@link #getSessionDescription()} is called.
115 + * @param prefs The SharedPreferences that will be used to store the sampling rate
116 + */
117 + public void setPreferences(SharedPreferences prefs) {
118 + mSettings = prefs;
119 + }
120 +
121 + @Override
122 + public synchronized void start() throws IllegalStateException, IOException {
123 + if (!mStreaming) {
124 + configure();
125 + super.start();
126 + }
127 + }
128 +
129 + public synchronized void configure() throws IllegalStateException, IOException {
130 + super.configure();
131 + mQuality = mRequestedQuality.clone();
132 +
133 + // Checks if the user has supplied an exotic sampling rate
134 + int i=0;
135 + for (;i<AUDIO_SAMPLING_RATES.length;i++) {
136 + if (AUDIO_SAMPLING_RATES[i] == mQuality.samplingRate) {
137 + mSamplingRateIndex = i;
138 + break;
139 + }
140 + }
141 + // If he did, we force a reasonable one: 16 kHz
142 + if (i>12) mQuality.samplingRate = 16000;
143 +
144 + if (mMode != mRequestedMode || mPacketizer==null) {
145 + mMode = mRequestedMode;
146 + if (mMode == MODE_MEDIARECORDER_API) {
147 + mPacketizer = new AACADTSPacketizer();
148 + } else {
149 + mPacketizer = new AACLATMPacketizer();
150 + }
151 + mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
152 + mPacketizer.getRtpSocket().setOutputStream(mOutputStream, mChannelIdentifier);
153 + }
154 +
155 + if (mMode == MODE_MEDIARECORDER_API) {
156 +
157 + testADTS();
158 +
159 + // All the MIME types parameters used here are described in RFC 3640
160 + // SizeLength: 13 bits will be enough because ADTS uses 13 bits for frame length
161 + // config: contains the object type + the sampling rate + the channel number
162 +
163 + // TODO: streamType always 5 ? profile-level-id always 15 ?
164 +
165 + mSessionDescription = "m=audio "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
166 + "a=rtpmap:96 mpeg4-generic/"+mQuality.samplingRate+"\r\n"+
167 + "a=fmtp:96 streamtype=5; profile-level-id=15; mode=AAC-hbr; config="+Integer.toHexString(mConfig)+"; SizeLength=13; IndexLength=3; IndexDeltaLength=3;\r\n";
168 +
169 + } else {
170 +
171 + mProfile = 2; // AAC LC
172 + mChannel = 1;
173 + mConfig = (mProfile & 0x1F) << 11 | (mSamplingRateIndex & 0x0F) << 7 | (mChannel & 0x0F) << 3;
174 +
175 + mSessionDescription = "m=audio "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
176 + "a=rtpmap:96 mpeg4-generic/"+mQuality.samplingRate+"\r\n"+
177 + "a=fmtp:96 streamtype=5; profile-level-id=15; mode=AAC-hbr; config="+Integer.toHexString(mConfig)+"; SizeLength=13; IndexLength=3; IndexDeltaLength=3;\r\n";
178 +
179 + }
180 +
181 + }
182 +
183 + @Override
184 + protected void encodeWithMediaRecorder() throws IOException {
185 + testADTS();
186 + ((AACADTSPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);
187 + super.encodeWithMediaRecorder();
188 + }
189 +
190 + @Override
191 + @SuppressLint({ "InlinedApi", "NewApi" })
192 + protected void encodeWithMediaCodec() throws IOException {
193 +
194 + final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)*2;
195 +
196 + ((AACLATMPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);
197 +
198 + mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
199 + mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
200 + MediaFormat format = new MediaFormat();
201 + format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
202 + format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
203 + format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
204 + format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
205 + format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
206 + format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
207 + mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
208 + mAudioRecord.startRecording();
209 + mMediaCodec.start();
210 +
211 + final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
212 + final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
213 +
214 + mThread = new Thread(new Runnable() {
215 + @Override
216 + public void run() {
217 + int len = 0, bufferIndex = 0;
218 + try {
219 + while (!Thread.interrupted()) {
220 + bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
221 + if (bufferIndex>=0) {
222 + inputBuffers[bufferIndex].clear();
223 + len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
224 + if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
225 + Log.e(TAG,"An error occured with the AudioRecord API !");
226 + } else {
227 + //Log.v(TAG,"Pushing raw audio to the decoder: len="+len+" bs: "+inputBuffers[bufferIndex].capacity());
228 + mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime()/1000, 0);
229 + }
230 + }
231 + }
232 + } catch (RuntimeException e) {
233 + e.printStackTrace();
234 + }
235 + }
236 + });
237 +
238 + mThread.start();
239 +
240 + // The packetizer encapsulates this stream in an RTP stream and send it over the network
241 + mPacketizer.setInputStream(inputStream);
242 + mPacketizer.start();
243 +
244 + mStreaming = true;
245 +
246 + }
247 +
248 + /** Stops the stream. */
249 + public synchronized void stop() {
250 + if (mStreaming) {
251 + if (mMode==MODE_MEDIACODEC_API) {
252 + Log.d(TAG, "Interrupting threads...");
253 + mThread.interrupt();
254 + mAudioRecord.stop();
255 + mAudioRecord.release();
256 + mAudioRecord = null;
257 + }
258 + super.stop();
259 + }
260 + }
261 +
262 + /**
263 + * Returns a description of the stream using SDP. It can then be included in an SDP file.
264 + * Will fail if called when streaming.
265 + */
266 + public String getSessionDescription() throws IllegalStateException {
267 + if (mSessionDescription == null) throw new IllegalStateException("You need to call configure() first !");
268 + return mSessionDescription;
269 + }
270 +
271 + /**
272 + * Records a short sample of AAC ADTS from the microphone to find out what the sampling rate really is
273 + * On some phone indeed, no error will be reported if the sampling rate used differs from the
274 + * one selected with setAudioSamplingRate
275 + * @throws IOException
276 + * @throws IllegalStateException
277 + */
278 + @SuppressLint("InlinedApi")
279 + private void testADTS() throws IllegalStateException, IOException {
280 +
281 + setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
282 + try {
283 + Field name = MediaRecorder.OutputFormat.class.getField("AAC_ADTS");
284 + setOutputFormat(name.getInt(null));
285 + }
286 + catch (Exception ignore) {
287 + setOutputFormat(6);
288 + }
289 +
290 + String key = PREF_PREFIX+"aac-"+mQuality.samplingRate;
291 +
292 + if (mSettings!=null && mSettings.contains(key)) {
293 + String[] s = mSettings.getString(key, "").split(",");
294 + mQuality.samplingRate = Integer.valueOf(s[0]);
295 + mConfig = Integer.valueOf(s[1]);
296 + mChannel = Integer.valueOf(s[2]);
297 + return;
298 + }
299 +
300 + final String TESTFILE = Environment.getExternalStorageDirectory().getPath()+"/spydroid-test.adts";
301 +
302 + if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
303 + throw new IllegalStateException("No external storage or external storage not ready !");
304 + }
305 +
306 + // The structure of an ADTS packet is described here: http://wiki.multimedia.cx/index.php?title=ADTS
307 +
308 + // ADTS header is 7 or 9 bytes long
309 + byte[] buffer = new byte[9];
310 +
311 + mMediaRecorder = new MediaRecorder();
312 + mMediaRecorder.setAudioSource(mAudioSource);
313 + mMediaRecorder.setOutputFormat(mOutputFormat);
314 + mMediaRecorder.setAudioEncoder(mAudioEncoder);
315 + mMediaRecorder.setAudioChannels(1);
316 + mMediaRecorder.setAudioSamplingRate(mQuality.samplingRate);
317 + mMediaRecorder.setAudioEncodingBitRate(mQuality.bitRate);
318 + mMediaRecorder.setOutputFile(TESTFILE);
319 + mMediaRecorder.setMaxDuration(1000);
320 + mMediaRecorder.prepare();
321 + mMediaRecorder.start();
322 +
323 + // We record for 1 sec
324 + // TODO: use the MediaRecorder.OnInfoListener
325 + try {
326 + Thread.sleep(2000);
327 + } catch (InterruptedException e) {}
328 +
329 + mMediaRecorder.stop();
330 + mMediaRecorder.release();
331 + mMediaRecorder = null;
332 +
333 + File file = new File(TESTFILE);
334 + RandomAccessFile raf = new RandomAccessFile(file, "r");
335 +
336 + // ADTS packets start with a sync word: 12bits set to 1
337 + while (true) {
338 + if ( (raf.readByte()&0xFF) == 0xFF ) {
339 + buffer[0] = raf.readByte();
340 + if ( (buffer[0]&0xF0) == 0xF0) break;
341 + }
342 + }
343 +
344 + raf.read(buffer,1,5);
345 +
346 + mSamplingRateIndex = (buffer[1]&0x3C)>>2 ;
347 + mProfile = ( (buffer[1]&0xC0) >> 6 ) + 1 ;
348 + mChannel = (buffer[1]&0x01) << 2 | (buffer[2]&0xC0) >> 6 ;
349 + mQuality.samplingRate = AUDIO_SAMPLING_RATES[mSamplingRateIndex];
350 +
351 + // 5 bits for the object type / 4 bits for the sampling rate / 4 bits for the channel / padding
352 + mConfig = (mProfile & 0x1F) << 11 | (mSamplingRateIndex & 0x0F) << 7 | (mChannel & 0x0F) << 3;
353 +
354 + Log.i(TAG,"MPEG VERSION: " + ( (buffer[0]&0x08) >> 3 ) );
355 + Log.i(TAG,"PROTECTION: " + (buffer[0]&0x01) );
356 + Log.i(TAG,"PROFILE: " + AUDIO_OBJECT_TYPES[ mProfile ] );
357 + Log.i(TAG,"SAMPLING FREQUENCY: " + mQuality.samplingRate );
358 + Log.i(TAG,"CHANNEL: " + mChannel );
359 +
360 + raf.close();
361 +
362 + if (mSettings!=null) {
363 + Editor editor = mSettings.edit();
364 + editor.putString(key, mQuality.samplingRate+","+mConfig+","+mChannel);
365 + editor.commit();
366 + }
367 +
368 + if (!file.delete()) Log.e(TAG,"Temp file could not be erased");
369 +
370 + }
371 +
372 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.audio;
20 +
21 +import java.io.IOException;
22 +import java.lang.reflect.Field;
23 +import net.majorkernelpanic.streaming.SessionBuilder;
24 +import net.majorkernelpanic.streaming.rtp.AMRNBPacketizer;
25 +import android.media.MediaRecorder;
26 +import android.service.textservice.SpellCheckerService.Session;
27 +
28 +/**
29 + * A class for streaming AAC from the camera of an android device using RTP.
30 + * You should use a {@link Session} instantiated with {@link SessionBuilder} instead of using this class directly.
31 + * Call {@link #setDestinationAddress(InetAddress)}, {@link #setDestinationPorts(int)} and {@link #setAudioQuality(AudioQuality)}
32 + * to configure the stream. You can then call {@link #start()} to start the RTP stream.
33 + * Call {@link #stop()} to stop the stream.
34 + */
35 +public class AMRNBStream extends AudioStream {
36 +
37 + public AMRNBStream() {
38 + super();
39 +
40 + mPacketizer = new AMRNBPacketizer();
41 +
42 + setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
43 +
44 + try {
45 + // RAW_AMR was deprecated in API level 16.
46 + Field deprecatedName = MediaRecorder.OutputFormat.class.getField("RAW_AMR");
47 + setOutputFormat(deprecatedName.getInt(null));
48 + } catch (Exception e) {
49 + setOutputFormat(MediaRecorder.OutputFormat.AMR_NB);
50 + }
51 +
52 + setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
53 +
54 + }
55 +
56 + /**
57 + * Starts the stream.
58 + */
59 + public synchronized void start() throws IllegalStateException, IOException {
60 + if (!mStreaming) {
61 + configure();
62 + super.start();
63 + }
64 + }
65 +
66 + public synchronized void configure() throws IllegalStateException, IOException {
67 + super.configure();
68 + mMode = MODE_MEDIARECORDER_API;
69 + mQuality = mRequestedQuality.clone();
70 + }
71 +
72 + /**
73 + * Returns a description of the stream using SDP. It can then be included in an SDP file.
74 + */
75 + public String getSessionDescription() {
76 + return "m=audio "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
77 + "a=rtpmap:96 AMR/8000\r\n" +
78 + "a=fmtp:96 octet-align=1;\r\n";
79 + }
80 +
81 + @Override
82 + protected void encodeWithMediaCodec() throws IOException {
83 + super.encodeWithMediaRecorder();
84 + }
85 +
86 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.audio;
20 +
21 +/**
22 + * A class that represents the quality of an audio stream.
23 + */
24 +public class AudioQuality {
25 +
26 + /** Default audio stream quality. */
27 + public final static AudioQuality DEFAULT_AUDIO_QUALITY = new AudioQuality(8000,32000);
28 +
29 + /** Represents a quality for a video stream. */
30 + public AudioQuality() {}
31 +
32 + /**
33 + * Represents a quality for an audio stream.
34 + * @param samplingRate The sampling rate
35 + * @param bitRate The bitrate in bit per seconds
36 + */
37 + public AudioQuality(int samplingRate, int bitRate) {
38 + this.samplingRate = samplingRate;
39 + this.bitRate = bitRate;
40 + }
41 +
42 + public int samplingRate = 0;
43 + public int bitRate = 0;
44 +
45 + public boolean equals(AudioQuality quality) {
46 + if (quality==null) return false;
47 + return (quality.samplingRate == this.samplingRate &&
48 + quality.bitRate == this.bitRate);
49 + }
50 +
51 + public AudioQuality clone() {
52 + return new AudioQuality(samplingRate, bitRate);
53 + }
54 +
55 + public static AudioQuality parseQuality(String str) {
56 + AudioQuality quality = DEFAULT_AUDIO_QUALITY.clone();
57 + if (str != null) {
58 + String[] config = str.split("-");
59 + try {
60 + quality.bitRate = Integer.parseInt(config[0])*1000; // conversion to bit/s
61 + quality.samplingRate = Integer.parseInt(config[1]);
62 + }
63 + catch (IndexOutOfBoundsException ignore) {}
64 + }
65 + return quality;
66 + }
67 +
68 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.audio;
20 +
21 +import java.io.FileDescriptor;
22 +import java.io.IOException;
23 +import java.io.InputStream;
24 +
25 +import net.majorkernelpanic.streaming.MediaStream;
26 +import android.media.MediaRecorder;
27 +import android.os.ParcelFileDescriptor;
28 +import android.util.Log;
29 +
30 +/**
31 + * Don't use this class directly.
32 + */
33 +public abstract class AudioStream extends MediaStream {
34 +
35 + protected int mAudioSource;
36 + protected int mOutputFormat;
37 + protected int mAudioEncoder;
38 + protected AudioQuality mRequestedQuality = AudioQuality.DEFAULT_AUDIO_QUALITY.clone();
39 + protected AudioQuality mQuality = mRequestedQuality.clone();
40 +
41 + public AudioStream() {
42 + setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
43 + }
44 +
45 + public void setAudioSource(int audioSource) {
46 + mAudioSource = audioSource;
47 + }
48 +
49 + public void setAudioQuality(AudioQuality quality) {
50 + mRequestedQuality = quality;
51 + }
52 +
53 + /**
54 + * Returns the quality of the stream.
55 + */
56 + public AudioQuality getAudioQuality() {
57 + return mQuality;
58 + }
59 +
60 + protected void setAudioEncoder(int audioEncoder) {
61 + mAudioEncoder = audioEncoder;
62 + }
63 +
64 + protected void setOutputFormat(int outputFormat) {
65 + mOutputFormat = outputFormat;
66 + }
67 +
68 + @Override
69 + protected void encodeWithMediaRecorder() throws IOException {
70 +
71 + // We need a local socket to forward data output by the camera to the packetizer
72 + createSockets();
73 +
74 + Log.v(TAG,"Requested audio with "+mQuality.bitRate/1000+"kbps"+" at "+mQuality.samplingRate/1000+"kHz");
75 +
76 + mMediaRecorder = new MediaRecorder();
77 + mMediaRecorder.setAudioSource(mAudioSource);
78 + mMediaRecorder.setOutputFormat(mOutputFormat);
79 + mMediaRecorder.setAudioEncoder(mAudioEncoder);
80 + mMediaRecorder.setAudioChannels(1);
81 + mMediaRecorder.setAudioSamplingRate(mQuality.samplingRate);
82 + mMediaRecorder.setAudioEncodingBitRate(mQuality.bitRate);
83 +
84 + // We write the output of the camera in a local socket instead of a file !
85 + // This one little trick makes streaming feasible quiet simply: data from the camera
86 + // can then be manipulated at the other end of the socket
87 + FileDescriptor fd = null;
88 + if (sPipeApi == PIPE_API_PFD) {
89 + fd = mParcelWrite.getFileDescriptor();
90 + } else {
91 + fd = mSender.getFileDescriptor();
92 + }
93 + mMediaRecorder.setOutputFile(fd);
94 + mMediaRecorder.setOutputFile(fd);
95 +
96 + mMediaRecorder.prepare();
97 + mMediaRecorder.start();
98 +
99 + InputStream is = null;
100 +
101 + if (sPipeApi == PIPE_API_PFD) {
102 + is = new ParcelFileDescriptor.AutoCloseInputStream(mParcelRead);
103 + } else {
104 + try {
105 + // mReceiver.getInputStream contains the data from the camera
106 + is = mReceiver.getInputStream();
107 + } catch (IOException e) {
108 + stop();
109 + throw new IOException("Something happened with the local sockets :/ Start failed !");
110 + }
111 + }
112 +
113 + // the mPacketizer encapsulates this stream in an RTP stream and send it over the network
114 + mPacketizer.setInputStream(is);
115 + mPacketizer.start();
116 + mStreaming = true;
117 +
118 + }
119 +
120 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.exceptions;
20 +
/** Thrown when the camera is already in use and cannot be acquired. */
public class CameraInUseException extends RuntimeException {

	private static final long serialVersionUID = -1866132102949435675L;

	/** @param message a human-readable description of the failure */
	public CameraInUseException(String message) {
		super(message);
	}

}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.exceptions;
20 +
/** Thrown when a requested stream configuration is not supported. */
public class ConfNotSupportedException extends RuntimeException {

	private static final long serialVersionUID = 5876298277802827615L;

	/** @param message a human-readable description of the unsupported configuration */
	public ConfNotSupportedException(String message) {
		super(message);
	}

}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.exceptions;
20 +
/** Thrown when the surface supplied for rendering/recording is not usable. */
public class InvalidSurfaceException extends RuntimeException {

	private static final long serialVersionUID = -7238661340093544496L;

	/** @param message a human-readable description of why the surface is invalid */
	public InvalidSurfaceException(String message) {
		super(message);
	}

}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.exceptions;
20 +
21 +import java.io.IOException;
22 +
/** Thrown when external storage is missing or not ready for reading/writing. */
public class StorageUnavailableException extends IOException {

	private static final long serialVersionUID = -7537890350373995089L;

	/** @param message a human-readable description of the storage problem */
	public StorageUnavailableException(String message) {
		super(message);
	}

}
1 +/*
2 + * Based on the work of fadden
3 + *
4 + * Copyright 2012 Google Inc. All Rights Reserved.
5 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
6 + *
7 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
8 + *
9 + * Licensed under the Apache License, Version 2.0 (the "License");
10 + * you may not use this file except in compliance with the License.
11 + * You may obtain a copy of the License at
12 + *
13 + * http://www.apache.org/licenses/LICENSE-2.0
14 + *
15 + * Unless required by applicable law or agreed to in writing, software
16 + * distributed under the License is distributed on an "AS IS" BASIS,
17 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 + * See the License for the specific language governing permissions and
19 + * limitations under the License.
20 + */
21 +
22 +package net.majorkernelpanic.streaming.gl;
23 +
24 +import android.annotation.SuppressLint;
25 +import android.opengl.EGL14;
26 +import android.opengl.EGLConfig;
27 +import android.opengl.EGLContext;
28 +import android.opengl.EGLDisplay;
29 +import android.opengl.EGLExt;
30 +import android.opengl.EGLSurface;
31 +import android.opengl.GLES20;
32 +import android.view.Surface;
33 +
34 +@SuppressLint("NewApi")
35 +public class SurfaceManager {
36 +
37 + public final static String TAG = "TextureManager";
38 +
39 + private static final int EGL_RECORDABLE_ANDROID = 0x3142;
40 +
41 + private EGLContext mEGLContext = null;
42 + private EGLContext mEGLSharedContext = null;
43 + private EGLSurface mEGLSurface = null;
44 + private EGLDisplay mEGLDisplay = null;
45 +
46 + private Surface mSurface;
47 +
48 + /**
49 + * Creates an EGL context and an EGL surface.
50 + */
51 + public SurfaceManager(Surface surface, SurfaceManager manager) {
52 + mSurface = surface;
53 + mEGLSharedContext = manager.mEGLContext;
54 + eglSetup();
55 + }
56 +
57 + /**
58 + * Creates an EGL context and an EGL surface.
59 + */
60 + public SurfaceManager(Surface surface) {
61 + mSurface = surface;
62 + eglSetup();
63 + }
64 +
65 + public void makeCurrent() {
66 + if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext))
67 + throw new RuntimeException("eglMakeCurrent failed");
68 + }
69 +
70 + public void swapBuffer() {
71 + EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
72 + }
73 +
74 + /**
75 + * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
76 + */
77 + public void setPresentationTime(long nsecs) {
78 + EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
79 + checkEglError("eglPresentationTimeANDROID");
80 + }
81 +
82 + /**
83 + * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
84 + */
85 + private void eglSetup() {
86 + mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
87 + if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
88 + throw new RuntimeException("unable to get EGL14 display");
89 + }
90 + int[] version = new int[2];
91 + if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
92 + throw new RuntimeException("unable to initialize EGL14");
93 + }
94 +
95 + // Configure EGL for recording and OpenGL ES 2.0.
96 + int[] attribList;
97 + if (mEGLSharedContext == null) {
98 + attribList = new int[] {
99 + EGL14.EGL_RED_SIZE, 8,
100 + EGL14.EGL_GREEN_SIZE, 8,
101 + EGL14.EGL_BLUE_SIZE, 8,
102 + EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
103 + EGL14.EGL_NONE
104 + };
105 + } else {
106 + attribList = new int[] {
107 + EGL14.EGL_RED_SIZE, 8,
108 + EGL14.EGL_GREEN_SIZE, 8,
109 + EGL14.EGL_BLUE_SIZE, 8,
110 + EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
111 + EGL_RECORDABLE_ANDROID, 1,
112 + EGL14.EGL_NONE
113 + };
114 + }
115 + EGLConfig[] configs = new EGLConfig[1];
116 + int[] numConfigs = new int[1];
117 + EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
118 + numConfigs, 0);
119 + checkEglError("eglCreateContext RGB888+recordable ES2");
120 +
121 + // Configure context for OpenGL ES 2.0.
122 + int[] attrib_list = {
123 + EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
124 + EGL14.EGL_NONE
125 + };
126 +
127 + if (mEGLSharedContext == null) {
128 + mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, attrib_list, 0);
129 + } else {
130 + mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], mEGLSharedContext, attrib_list, 0);
131 + }
132 + checkEglError("eglCreateContext");
133 +
134 + // Create a window surface, and attach it to the Surface we received.
135 + int[] surfaceAttribs = {
136 + EGL14.EGL_NONE
137 + };
138 + mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
139 + surfaceAttribs, 0);
140 + checkEglError("eglCreateWindowSurface");
141 +
142 + GLES20.glDisable(GLES20.GL_DEPTH_TEST);
143 + GLES20.glDisable(GLES20.GL_CULL_FACE);
144 +
145 + }
146 +
147 + /**
148 + * Discards all resources held by this class, notably the EGL context. Also releases the
149 + * Surface that was passed to our constructor.
150 + */
151 + public void release() {
152 + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
153 + EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
154 + EGL14.EGL_NO_CONTEXT);
155 + EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
156 + EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
157 + EGL14.eglReleaseThread();
158 + EGL14.eglTerminate(mEGLDisplay);
159 + }
160 + mEGLDisplay = EGL14.EGL_NO_DISPLAY;
161 + mEGLContext = EGL14.EGL_NO_CONTEXT;
162 + mEGLSurface = EGL14.EGL_NO_SURFACE;
163 + mSurface.release();
164 + }
165 +
166 + /**
167 + * Checks for EGL errors. Throws an exception if one is found.
168 + */
169 + private void checkEglError(String msg) {
170 + int error;
171 + if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
172 + throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
173 + }
174 + }
175 +
176 +
177 +
178 +
179 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.gl;
20 +
21 +import java.util.concurrent.Semaphore;
22 +import net.majorkernelpanic.streaming.MediaStream;
23 +import net.majorkernelpanic.streaming.video.VideoStream;
24 +import android.content.Context;
25 +import android.graphics.SurfaceTexture;
26 +import android.graphics.SurfaceTexture.OnFrameAvailableListener;
27 +import android.os.Handler;
28 +import android.util.AttributeSet;
29 +import android.util.Log;
30 +import android.view.Surface;
31 +import android.view.SurfaceHolder;
32 +
33 +/**
34 + * An enhanced SurfaceView in which the camera preview will be rendered.
35 + * This class was needed for two reasons. <br />
36 + *
 37 + * First, it allows feeding MediaCodec with the camera preview
38 + * using the surface-to-buffer method while rendering it in a surface
39 + * visible to the user. To force the surface-to-buffer method in
40 + * libstreaming, call {@link MediaStream#setStreamingMethod(byte)}
41 + * with {@link MediaStream#MODE_MEDIACODEC_API_2}. <br />
42 + *
43 + * Second, it allows to force the aspect ratio of the SurfaceView
44 + * to match the aspect ratio of the camera preview, so that the
 45 + * preview does not appear distorted to the user of your app. To do
46 + * that, call {@link SurfaceView#setAspectRatioMode(int)} with
47 + * {@link SurfaceView#ASPECT_RATIO_PREVIEW} after creating your
48 + * {@link SurfaceView}. <br />
49 + *
50 + */
public class SurfaceView extends android.view.SurfaceView implements Runnable, OnFrameAvailableListener, SurfaceHolder.Callback {

    public final static String TAG = "SurfaceView";

    /**
     * The aspect ratio of the surface view will be equal
     * to the aspect ratio of the camera preview.
     **/
    public static final int ASPECT_RATIO_PREVIEW = 0x01;

    /** The surface view will completely fill its parent. */
    public static final int ASPECT_RATIO_STRETCH = 0x00;

    // Rendering thread created by startGLThread(); executes run() below.
    private Thread mThread = null;
    // Handler bound to the looper of the thread that constructed this view
    // (normally the UI thread); used to post requestLayout() from requestAspectRatio().
    private Handler mHandler = null;
    // Handshake flag between onFrameAvailable() and the rendering loop,
    // always read/written while holding mSyncObject.
    private boolean mFrameAvailable = false;
    // Cleared in surfaceDestroyed() to terminate the rendering loop.
    private boolean mRunning = true;
    private int mAspectRatioMode = ASPECT_RATIO_STRETCH;

    // The surface in which the preview is rendered
    private SurfaceManager mViewSurfaceManager = null;

    // The input surface of the MediaCodec
    private SurfaceManager mCodecSurfaceManager = null;

    // Handles the rendering of the SurfaceTexture we got
    // from the camera, onto a Surface
    private TextureManager mTextureManager = null;

    // Released by the rendering thread once its EGL/texture setup is done;
    // startGLThread() blocks on it so callers see a fully initialized view.
    private final Semaphore mLock = new Semaphore(0);
    // Guards mFrameAvailable and mCodecSurfaceManager swaps.
    private final Object mSyncObject = new Object();

    // Allows to force the aspect ratio of the preview
    private ViewAspectRatioMeasurer mVARM = new ViewAspectRatioMeasurer();

    public SurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        mHandler = new Handler();
        getHolder().addCallback(this);
    }

    /** Selects {@link #ASPECT_RATIO_PREVIEW} or {@link #ASPECT_RATIO_STRETCH}. */
    public void setAspectRatioMode(int mode) {
        mAspectRatioMode = mode;
    }

    // NOTE(review): throws NullPointerException if called before startGLThread()
    // has created the TextureManager — confirm callers respect that ordering.
    public SurfaceTexture getSurfaceTexture() {
        return mTextureManager.getSurfaceTexture();
    }

    /**
     * Attaches the MediaCodec input surface: every frame drawn by the rendering
     * loop will additionally be rendered into it, in an EGL context shared with
     * the preview's context.
     */
    public void addMediaCodecSurface(Surface surface) {
        synchronized (mSyncObject) {
            mCodecSurfaceManager = new SurfaceManager(surface,mViewSurfaceManager);
        }
    }

    /** Detaches and releases the MediaCodec input surface, if any. */
    public void removeMediaCodecSurface() {
        synchronized (mSyncObject) {
            if (mCodecSurfaceManager != null) {
                mCodecSurfaceManager.release();
                mCodecSurfaceManager = null;
            }
        }
    }

    /**
     * Starts the rendering thread (no-op if a SurfaceTexture already exists)
     * and blocks the caller until the thread's GL state is ready.
     */
    public void startGLThread() {
        Log.d(TAG,"Thread started.");
        if (mTextureManager == null) {
            mTextureManager = new TextureManager();
        }
        if (mTextureManager.getSurfaceTexture() == null) {
            mThread = new Thread(SurfaceView.this);
            mRunning = true;
            mThread.start();
            // Wait until run() releases the semaphore after EGL/texture setup.
            mLock.acquireUninterruptibly();
        }
    }

    /**
     * Rendering loop: waits (up to 2.5 s per iteration) for a camera frame,
     * draws it to the on-screen surface and, when present, to the MediaCodec
     * input surface. Exits when mRunning is cleared or the thread is interrupted.
     */
    @Override
    public void run() {

        mViewSurfaceManager = new SurfaceManager(getHolder().getSurface());
        mViewSurfaceManager.makeCurrent();
        mTextureManager.createTexture().setOnFrameAvailableListener(this);

        // Unblock startGLThread(): initialization is complete.
        mLock.release();

        try {
            long ts = 0, oldts = 0;
            while (mRunning) {
                synchronized (mSyncObject) {
                    // Bounded wait so the loop can notice mRunning == false
                    // even when no frames arrive.
                    mSyncObject.wait(2500);
                    if (mFrameAvailable) {
                        mFrameAvailable = false;

                        // Draw the new camera frame to the visible preview surface.
                        mViewSurfaceManager.makeCurrent();
                        mTextureManager.updateFrame();
                        mTextureManager.drawFrame();
                        mViewSurfaceManager.swapBuffer();

                        // Draw the same frame to the encoder input, stamped with
                        // the camera timestamp (nanoseconds).
                        if (mCodecSurfaceManager != null) {
                            mCodecSurfaceManager.makeCurrent();
                            mTextureManager.drawFrame();
                            oldts = ts;
                            ts = mTextureManager.getSurfaceTexture().getTimestamp();
                            //Log.d(TAG,"FPS: "+(1000000000/(ts-oldts)));
                            mCodecSurfaceManager.setPresentationTime(ts);
                            mCodecSurfaceManager.swapBuffer();
                        }

                    } else {
                        Log.e(TAG,"No frame received !");
                    }
                }
            }
        } catch (InterruptedException ignore) {
            // Interrupted from surfaceDestroyed(): fall through to cleanup.
        } finally {
            mViewSurfaceManager.release();
            mTextureManager.release();
        }
    }

    /** Called by the camera's SurfaceTexture; wakes up the rendering loop. */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (mSyncObject) {
            mFrameAvailable = true;
            mSyncObject.notifyAll();
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    /** Stops and interrupts the rendering thread when the surface goes away. */
    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (mThread != null) {
            mThread.interrupt();
        }
        mRunning = false;
    }

    /** Applies the forced aspect ratio when ASPECT_RATIO_PREVIEW is selected. */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        if (mVARM.getAspectRatio() > 0 && mAspectRatioMode == ASPECT_RATIO_PREVIEW) {
            mVARM.measure(widthMeasureSpec, heightMeasureSpec);
            setMeasuredDimension(mVARM.getMeasuredWidth(), mVARM.getMeasuredHeight());
        } else {
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        }
    }

    /**
     * Requests a certain aspect ratio for the preview. You don't have to call this yourself,
     * the {@link VideoStream} will do it when it's needed.
     */
    public void requestAspectRatio(double aspectRatio) {
        if (mVARM.getAspectRatio() != aspectRatio) {
            mVARM.setAspectRatio(aspectRatio);
            // requestLayout() must run on the thread that owns the view hierarchy.
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    if (mAspectRatioMode == ASPECT_RATIO_PREVIEW) {
                        requestLayout();
                    }
                }
            });
        }
    }

    /**
     * This class is a helper to measure views that require a specific aspect ratio.
     * @author Jesper Borgstrup
     */
    public class ViewAspectRatioMeasurer {

        // Target width/height ratio; 0 until setAspectRatio() is called.
        private double aspectRatio;

        public void setAspectRatio(double aspectRatio) {
            this.aspectRatio = aspectRatio;
        }

        public double getAspectRatio() {
            return this.aspectRatio;
        }

        /**
         * Measure with the aspect ratio given at construction.<br />
         * <br />
         * After measuring, get the width and height with the {@link #getMeasuredWidth()}
         * and {@link #getMeasuredHeight()} methods, respectively.
         * @param widthMeasureSpec The width <tt>MeasureSpec</tt> passed in your <tt>View.onMeasure()</tt> method
         * @param heightMeasureSpec The height <tt>MeasureSpec</tt> passed in your <tt>View.onMeasure()</tt> method
         */
        public void measure(int widthMeasureSpec, int heightMeasureSpec) {
            measure(widthMeasureSpec, heightMeasureSpec, this.aspectRatio);
        }

        /**
         * Measure with a specific aspect ratio<br />
         * <br />
         * After measuring, get the width and height with the {@link #getMeasuredWidth()}
         * and {@link #getMeasuredHeight()} methods, respectively.
         * @param widthMeasureSpec The width <tt>MeasureSpec</tt> passed in your <tt>View.onMeasure()</tt> method
         * @param heightMeasureSpec The height <tt>MeasureSpec</tt> passed in your <tt>View.onMeasure()</tt> method
         * @param aspectRatio The aspect ratio to calculate measurements in respect to
         */
        public void measure(int widthMeasureSpec, int heightMeasureSpec, double aspectRatio) {
            int widthMode = MeasureSpec.getMode( widthMeasureSpec );
            int widthSize = widthMode == MeasureSpec.UNSPECIFIED ? Integer.MAX_VALUE : MeasureSpec.getSize( widthMeasureSpec );
            int heightMode = MeasureSpec.getMode( heightMeasureSpec );
            int heightSize = heightMode == MeasureSpec.UNSPECIFIED ? Integer.MAX_VALUE : MeasureSpec.getSize( heightMeasureSpec );

            if ( heightMode == MeasureSpec.EXACTLY && widthMode == MeasureSpec.EXACTLY ) {
                /*
                 * Possibility 1: Both width and height fixed
                 */
                measuredWidth = widthSize;
                measuredHeight = heightSize;

            } else if ( heightMode == MeasureSpec.EXACTLY ) {
                /*
                 * Possibility 2: Width dynamic, height fixed
                 */
                measuredWidth = (int) Math.min( widthSize, heightSize * aspectRatio );
                measuredHeight = (int) (measuredWidth / aspectRatio);

            } else if ( widthMode == MeasureSpec.EXACTLY ) {
                /*
                 * Possibility 3: Width fixed, height dynamic
                 */
                measuredHeight = (int) Math.min( heightSize, widthSize / aspectRatio );
                measuredWidth = (int) (measuredHeight * aspectRatio);

            } else {
                /*
                 * Possibility 4: Both width and height dynamic
                 */
                if ( widthSize > heightSize * aspectRatio ) {
                    measuredHeight = heightSize;
                    measuredWidth = (int)( measuredHeight * aspectRatio );
                } else {
                    measuredWidth = widthSize;
                    measuredHeight = (int) (measuredWidth / aspectRatio);
                }

            }
        }

        // Stays null until measure() has run; the getters throw if read before that.
        private Integer measuredWidth = null;
        /**
         * Get the width measured in the latest call to <tt>measure()</tt>.
         */
        public int getMeasuredWidth() {
            if ( measuredWidth == null ) {
                throw new IllegalStateException( "You need to run measure() before trying to get measured dimensions" );
            }
            return measuredWidth;
        }

        private Integer measuredHeight = null;
        /**
         * Get the height measured in the latest call to <tt>measure()</tt>.
         */
        public int getMeasuredHeight() {
            if ( measuredHeight == null ) {
                throw new IllegalStateException( "You need to run measure() before trying to get measured dimensions" );
            }
            return measuredHeight;
        }

    }

}
1 +/*
2 + * Based on the work of fadden
3 + *
4 + * Copyright 2012 Google Inc. All Rights Reserved.
5 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
6 + *
7 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
8 + *
9 + * Licensed under the Apache License, Version 2.0 (the "License");
10 + * you may not use this file except in compliance with the License.
11 + * You may obtain a copy of the License at
12 + *
13 + * http://www.apache.org/licenses/LICENSE-2.0
14 + *
15 + * Unless required by applicable law or agreed to in writing, software
16 + * distributed under the License is distributed on an "AS IS" BASIS,
17 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 + * See the License for the specific language governing permissions and
19 + * limitations under the License.
20 + */
21 +
22 +package net.majorkernelpanic.streaming.gl;
23 +
24 +import java.nio.ByteBuffer;
25 +import java.nio.ByteOrder;
26 +import java.nio.FloatBuffer;
27 +import android.annotation.SuppressLint;
28 +import android.graphics.SurfaceTexture;
29 +import android.opengl.GLES11Ext;
30 +import android.opengl.GLES20;
31 +import android.opengl.Matrix;
32 +import android.util.Log;
33 +
34 +/**
35 + * Code for rendering a texture onto a surface using OpenGL ES 2.0.
36 + */
@SuppressLint("InlinedApi")
public class TextureManager {

    public final static String TAG = "TextureManager";

    private static final int FLOAT_SIZE_BYTES = 4;
    // Each vertex is 5 floats: X, Y, Z position followed by U, V texture coords.
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
    // Full-screen quad drawn as a 4-vertex triangle strip.
    private final float[] mTriangleVerticesData = {
        // X, Y, Z, U, V
        -1.0f, -1.0f, 0, 0.f, 0.f,
        1.0f, -1.0f, 0, 1.f, 0.f,
        -1.0f, 1.0f, 0, 0.f, 1.f,
        1.0f, 1.0f, 0, 1.f, 1.f,
    };

    // Direct native-order buffer wrapping mTriangleVerticesData, fed to glVertexAttribPointer.
    private FloatBuffer mTriangleVertices;

    // Pass-through vertex shader: applies the MVP and texture-transform matrices.
    private static final String VERTEX_SHADER =
            "uniform mat4 uMVPMatrix;\n" +
            "uniform mat4 uSTMatrix;\n" +
            "attribute vec4 aPosition;\n" +
            "attribute vec4 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "  gl_Position = uMVPMatrix * aPosition;\n" +
            "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
            "}\n";

    // Samples the external (camera) texture; requires the OES image-external extension.
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" + // highp here doesn't seem to matter
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "}\n";

    private float[] mMVPMatrix = new float[16];
    // Texture transform matrix, refreshed from the SurfaceTexture on every drawFrame().
    private float[] mSTMatrix = new float[16];

    // GL program and attribute/uniform handles, valid after createTexture().
    private int mProgram;
    private int mTextureID = -12345;
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;

    // Created in createTexture(); the camera renders into it.
    private SurfaceTexture mSurfaceTexture;

    /** Allocates the vertex buffer; no GL calls are made here. */
    public TextureManager() {
        mTriangleVertices = ByteBuffer.allocateDirect(
            mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(mTriangleVerticesData).position(0);

        Matrix.setIdentityM(mSTMatrix, 0);
    }

    /** Returns the GL texture name (only meaningful after createTexture()). */
    public int getTextureId() {
        return mTextureID;
    }

    /** Returns the SurfaceTexture, or null before createTexture() has run. */
    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }

    /** Latches the most recent camera frame into the GL texture. */
    public void updateFrame() {
        mSurfaceTexture.updateTexImage();
    }

    /**
     * Draws the latched frame as a full-screen quad onto the surface that is
     * current on the calling thread. Must run on the thread owning the EGL context.
     */
    public void drawFrame() {
        checkGlError("onDrawFrame start");
        mSurfaceTexture.getTransformMatrix(mSTMatrix);

        //GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
        //GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        // Unbind first, then bind the camera texture — presumably a defensive
        // reset of the binding; TODO confirm whether the first bind is needed.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");

        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");

        Matrix.setIdentityM(mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        checkGlError("glDrawArrays");
        GLES20.glFinish();
    }

    /**
     * Initializes GL state. Call this after the EGL surface has been created and made current.
     * @return the SurfaceTexture backed by the newly created external texture
     * @throws RuntimeException if the program cannot be built or a handle is missing
     */
    public SurfaceTexture createTexture() {
        mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        if (mProgram == 0) {
            throw new RuntimeException("failed creating program");
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }

        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        checkGlError("glGetUniformLocation uMVPMatrix");
        if (muMVPMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
        }

        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
        checkGlError("glGetUniformLocation uSTMatrix");
        if (muSTMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uSTMatrix");
        }

        // Create the external texture the camera will render into.
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);

        mTextureID = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        checkGlError("glBindTexture mTextureID");

        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
            GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
            GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
            GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
            GLES20.GL_CLAMP_TO_EDGE);
        checkGlError("glTexParameter");

        mSurfaceTexture = new SurfaceTexture(mTextureID);
        return mSurfaceTexture;
    }

    // NOTE(review): only drops the reference; mSurfaceTexture.release() and
    // glDeleteTextures/glDeleteProgram are never called — potential resource
    // leak, but deleting GL objects requires the owning context to be current,
    // so verify call sites before changing this.
    public void release() {
        mSurfaceTexture = null;
    }

    /**
     * Replaces the fragment shader. Pass in null to reset to default.
     */
    public void changeFragmentShader(String fragmentShader) {
        if (fragmentShader == null) {
            fragmentShader = FRAGMENT_SHADER;
        }
        GLES20.glDeleteProgram(mProgram);
        mProgram = createProgram(VERTEX_SHADER, fragmentShader);
        if (mProgram == 0) {
            throw new RuntimeException("failed creating program");
        }
    }

    /**
     * Compiles a shader of the given type; returns the shader name,
     * or 0 on compilation failure (after logging the info log).
     */
    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e(TAG, "Could not compile shader " + shaderType + ":");
            Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }

    /**
     * Compiles both shaders and links them into a program.
     * Returns the program name, or 0 on any compile/link failure.
     */
    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }

        int program = GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            Log.e(TAG, "Could not create program");
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "Could not link program: ");
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        return program;
    }

    /**
     * Drains the GL error queue and throws on the first error found,
     * tagging it with {@code op} for context.
     */
    public void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }
}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.hw;
20 +
21 +import java.util.ArrayList;
22 +import java.util.HashSet;
23 +import java.util.Set;
24 +import android.annotation.SuppressLint;
25 +import android.media.MediaCodecInfo;
26 +import android.media.MediaCodecList;
27 +import android.util.Log;
28 +
29 +@SuppressLint("InlinedApi")
30 +public class CodecManager {
31 +
32 + public final static String TAG = "CodecManager";
33 +
34 + public static final int[] SUPPORTED_COLOR_FORMATS = {
35 + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
36 + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar,
37 + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
38 + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar,
39 + MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar
40 + };
41 +
42 + private static Codec[] sEncoders = null;
43 + private static Codec[] sDecoders = null;
44 +
45 + static class Codec {
46 + public Codec(String name, Integer[] formats) {
47 + this.name = name;
48 + this.formats = formats;
49 + }
50 + public String name;
51 + public Integer[] formats;
52 + }
53 +
54 + /**
55 + * Lists all encoders that claim to support a color format that we know how to use.
56 + * @return A list of those encoders
57 + */
58 + @SuppressLint("NewApi")
59 + public synchronized static Codec[] findEncodersForMimeType(String mimeType) {
60 + if (sEncoders != null) return sEncoders;
61 +
62 + ArrayList<Codec> encoders = new ArrayList<>();
63 +
64 + // We loop through the encoders, apparently this can take up to a sec (testes on a GS3)
65 + for(int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--){
66 + MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
67 + if (!codecInfo.isEncoder()) continue;
68 +
69 + String[] types = codecInfo.getSupportedTypes();
70 + for (int i = 0; i < types.length; i++) {
71 + if (types[i].equalsIgnoreCase(mimeType)) {
72 + try {
73 + MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
74 + Set<Integer> formats = new HashSet<>();
75 +
76 + // And through the color formats supported
77 + for (int k = 0; k < capabilities.colorFormats.length; k++) {
78 + int format = capabilities.colorFormats[k];
79 +
80 + for (int l=0;l<SUPPORTED_COLOR_FORMATS.length;l++) {
81 + if (format == SUPPORTED_COLOR_FORMATS[l]) {
82 + formats.add(format);
83 + }
84 + }
85 + }
86 +
87 + Codec codec = new Codec(codecInfo.getName(), (Integer[]) formats.toArray(new Integer[formats.size()]));
88 + encoders.add(codec);
89 + } catch (Exception e) {
90 + Log.wtf(TAG,e);
91 + }
92 + }
93 + }
94 + }
95 +
96 + sEncoders = (Codec[]) encoders.toArray(new Codec[encoders.size()]);
97 + return sEncoders;
98 +
99 + }
100 +
101 + /**
102 + * Lists all decoders that claim to support a color format that we know how to use.
103 + * @return A list of those decoders
104 + */
105 + @SuppressLint("NewApi")
106 + public synchronized static Codec[] findDecodersForMimeType(String mimeType) {
107 + if (sDecoders != null) return sDecoders;
108 + ArrayList<Codec> decoders = new ArrayList<>();
109 +
110 + // We loop through the decoders, apparently this can take up to a sec (testes on a GS3)
111 + for(int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--){
112 + MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
113 + if (codecInfo.isEncoder()) continue;
114 +
115 + String[] types = codecInfo.getSupportedTypes();
116 + for (int i = 0; i < types.length; i++) {
117 + if (types[i].equalsIgnoreCase(mimeType)) {
118 + try {
119 + MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
120 + Set<Integer> formats = new HashSet<>();
121 +
122 + // And through the color formats supported
123 + for (int k = 0; k < capabilities.colorFormats.length; k++) {
124 + int format = capabilities.colorFormats[k];
125 +
126 + for (int l=0;l<SUPPORTED_COLOR_FORMATS.length;l++) {
127 + if (format == SUPPORTED_COLOR_FORMATS[l]) {
128 + formats.add(format);
129 + }
130 + }
131 + }
132 +
133 + Codec codec = new Codec(codecInfo.getName(), (Integer[]) formats.toArray(new Integer[formats.size()]));
134 + decoders.add(codec);
135 + } catch (Exception e) {
136 + Log.wtf(TAG,e);
137 + }
138 + }
139 + }
140 + }
141 +
142 + sDecoders = (Codec[]) decoders.toArray(new Codec[decoders.size()]);
143 +
144 + // We will use the decoder from google first, it seems to work properly on many phones
145 + for (int i=0;i<sDecoders.length;i++) {
146 + if (sDecoders[i].name.equalsIgnoreCase("omx.google.h264.decoder")) {
147 + Codec codec = sDecoders[0];
148 + sDecoders[0] = sDecoders[i];
149 + sDecoders[i] = codec;
150 + }
151 + }
152 +
153 + return sDecoders;
154 + }
155 +
156 +}
157 +
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.hw;
20 +
21 +import java.io.IOException;
22 +import java.io.PrintWriter;
23 +import java.io.StringWriter;
24 +import java.nio.ByteBuffer;
25 +import net.majorkernelpanic.streaming.hw.CodecManager.Codec;
26 +import android.annotation.SuppressLint;
27 +import android.content.Context;
28 +import android.content.SharedPreferences;
29 +import android.content.SharedPreferences.Editor;
30 +import android.media.MediaCodec;
31 +import android.media.MediaCodec.BufferInfo;
32 +import android.media.MediaCodecInfo;
33 +import android.media.MediaFormat;
34 +import android.os.Build;
35 +import android.preference.PreferenceManager;
36 +import android.util.Base64;
37 +import android.util.Log;
38 +
39 +/**
40 + *
41 + * The purpose of this class is to detect and by-pass some bugs (or underspecified configuration) that
42 + * encoders available through the MediaCodec API may have. <br />
43 + * Feeding the encoder with a surface is not tested here.
44 + * Some bugs you may have encountered:<br />
45 + * <ul>
46 + * <li>U and V panes reversed</li>
47 + * <li>Some padding is needed after the Y pane</li>
48 + * <li>stride!=width or slice-height!=height</li>
49 + * </ul>
50 + */
51 +@SuppressLint("NewApi")
52 +public class EncoderDebugger {
53 +
54 + public final static String TAG = "EncoderDebugger";
55 +
56 + /** Prefix that will be used for all shared preferences saved by libstreaming. */
57 + private static final String PREF_PREFIX = "libstreaming-";
58 +
59 + /**
60 + * If this is set to false the test will be run only once and the result
61 + * will be saved in the shared preferences.
62 + */
63 + private static final boolean DEBUG = false;
64 +
65 + /** Set this to true to see more logs. */
66 + private static final boolean VERBOSE = false;
67 +
68 + /** Will be incremented every time this test is modified. */
69 + private static final int VERSION = 3;
70 +
71 + /** Bit rate that will be used with the encoder. */
72 + private final static int BITRATE = 1000000;
73 +
74 + /** Frame rate that will be used to test the encoder. */
75 + private final static int FRAMERATE = 20;
76 +
77 + private final static String MIME_TYPE = "video/avc";
78 +
79 + private final static int NB_DECODED = 34;
80 + private final static int NB_ENCODED = 50;
81 +
82 + private int mDecoderColorFormat, mEncoderColorFormat;
83 + private String mDecoderName, mEncoderName, mErrorLog;
84 + private MediaCodec mEncoder, mDecoder;
85 + private int mWidth, mHeight, mSize;
86 + private byte[] mSPS, mPPS;
87 + private byte[] mData, mInitialImage;
88 + private MediaFormat mDecOutputFormat;
89 + private NV21Convertor mNV21;
90 + private SharedPreferences mPreferences;
91 + private byte[][] mVideo, mDecodedVideo;
92 + private String mB64PPS, mB64SPS;
93 +
94 + public synchronized static void asyncDebug(final Context context, final int width, final int height) {
95 + new Thread(new Runnable() {
96 + @Override
97 + public void run() {
98 + try {
99 + SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
100 + debug(prefs, width, height);
101 + } catch (Exception e) {}
102 + }
103 + }).start();
104 + }
105 +
106 + public synchronized static EncoderDebugger debug(Context context, int width, int height) {
107 + SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
108 + return debug(prefs, width, height);
109 + }
110 +
111 + public synchronized static EncoderDebugger debug(SharedPreferences prefs, int width, int height) {
112 + EncoderDebugger debugger = new EncoderDebugger(prefs, width, height);
113 + debugger.debug();
114 + return debugger;
115 + }
116 +
117 + public String getB64PPS() {
118 + return mB64PPS;
119 + }
120 +
121 + public String getB64SPS() {
122 + return mB64SPS;
123 + }
124 +
125 + public String getEncoderName() {
126 + return mEncoderName;
127 + }
128 +
129 + public int getEncoderColorFormat() {
130 + return mEncoderColorFormat;
131 + }
132 +
133 + /** This {@link NV21Convertor} will do the necessary work to feed properly the encoder. */
134 + public NV21Convertor getNV21Convertor() {
135 + return mNV21;
136 + }
137 +
138 + /** A log of all the errors that occurred during the test. */
139 + public String getErrorLog() {
140 + return mErrorLog;
141 + }
142 +
143 + private EncoderDebugger(SharedPreferences prefs, int width, int height) {
144 + mPreferences = prefs;
145 + mWidth = width;
146 + mHeight = height;
147 + mSize = width*height;
148 + reset();
149 + }
150 +
151 + private void reset() {
152 + mNV21 = new NV21Convertor();
153 + mVideo = new byte[NB_ENCODED][];
154 + mDecodedVideo = new byte[NB_DECODED][];
155 + mErrorLog = "";
156 + mPPS = null;
157 + mSPS = null;
158 + }
159 +
160 + private void debug() {
161 +
162 + // If testing the phone again is not needed,
163 + // we just restore the result from the shared preferences
164 + if (!checkTestNeeded()) {
165 + String resolution = mWidth+"x"+mHeight+"-";
166 +
167 + boolean success = mPreferences.getBoolean(PREF_PREFIX+resolution+"success",false);
168 + if (!success) {
169 + throw new RuntimeException("Phone not supported with this resolution ("+mWidth+"x"+mHeight+")");
170 + }
171 +
172 + mNV21.setSize(mWidth, mHeight);
173 + mNV21.setSliceHeigth(mPreferences.getInt(PREF_PREFIX+resolution+"sliceHeight", 0));
174 + mNV21.setStride(mPreferences.getInt(PREF_PREFIX+resolution+"stride", 0));
175 + mNV21.setYPadding(mPreferences.getInt(PREF_PREFIX+resolution+"padding", 0));
176 + mNV21.setPlanar(mPreferences.getBoolean(PREF_PREFIX+resolution+"planar", false));
177 + mNV21.setColorPanesReversed(mPreferences.getBoolean(PREF_PREFIX+resolution+"reversed", false));
178 + mEncoderName = mPreferences.getString(PREF_PREFIX+resolution+"encoderName", "");
179 + mEncoderColorFormat = mPreferences.getInt(PREF_PREFIX+resolution+"colorFormat", 0);
180 + mB64PPS = mPreferences.getString(PREF_PREFIX+resolution+"pps", "");
181 + mB64SPS = mPreferences.getString(PREF_PREFIX+resolution+"sps", "");
182 +
183 + return;
184 + }
185 +
186 + if (VERBOSE) Log.d(TAG, ">>>> Testing the phone for resolution "+mWidth+"x"+mHeight);
187 +
188 + // Builds a list of available encoders and decoders we may be able to use
189 + // because they support some nice color formats
190 + Codec[] encoders = CodecManager.findEncodersForMimeType(MIME_TYPE);
191 + Codec[] decoders = CodecManager.findDecodersForMimeType(MIME_TYPE);
192 +
193 + int count = 0, n = 1;
194 + for (int i=0;i<encoders.length;i++) {
195 + count += encoders[i].formats.length;
196 + }
197 +
198 + // Tries available encoders
199 + for (int i=0;i<encoders.length;i++) {
200 + for (int j=0;j<encoders[i].formats.length;j++) {
201 + reset();
202 +
203 + mEncoderName = encoders[i].name;
204 + mEncoderColorFormat = encoders[i].formats[j];
205 +
206 + if (VERBOSE) Log.v(TAG, ">> Test "+(n++)+"/"+count+": "+mEncoderName+" with color format "+mEncoderColorFormat+" at "+mWidth+"x"+mHeight);
207 +
208 + // Converts from NV21 to YUV420 with the specified parameters
209 + mNV21.setSize(mWidth, mHeight);
210 + mNV21.setSliceHeigth(mHeight);
211 + mNV21.setStride(mWidth);
212 + mNV21.setYPadding(0);
213 + mNV21.setEncoderColorFormat(mEncoderColorFormat);
214 +
215 + // /!\ NV21Convertor can directly modify the input
216 + createTestImage();
217 + mData = mNV21.convert(mInitialImage);
218 +
219 + try {
220 +
221 + // Starts the encoder
222 + configureEncoder();
223 + searchSPSandPPS();
224 +
225 + if (VERBOSE) Log.v(TAG, "SPS and PPS in b64: SPS="+mB64SPS+", PPS="+mB64PPS);
226 +
227 + // Feeds the encoder with an image repeatedly to produce some NAL units
228 + encode();
229 +
230 + // We now try to decode the NALs with decoders available on the phone
231 + boolean decoded = false;
232 + for (int k=0;k<decoders.length && !decoded;k++) {
233 + for (int l=0;l<decoders[k].formats.length && !decoded;l++) {
234 + mDecoderName = decoders[k].name;
235 + mDecoderColorFormat = decoders[k].formats[l];
236 + try {
237 + configureDecoder();
238 + } catch (Exception e) {
239 + if (VERBOSE) Log.d(TAG, mDecoderName+" can't be used with "+mDecoderColorFormat+" at "+mWidth+"x"+mHeight);
240 + releaseDecoder();
241 + break;
242 + }
243 + try {
244 + decode(true);
245 + if (VERBOSE) Log.d(TAG, mDecoderName+" successfully decoded the NALs (color format "+mDecoderColorFormat+")");
246 + decoded = true;
247 + } catch (Exception e) {
248 + if (VERBOSE) Log.e(TAG, mDecoderName+" failed to decode the NALs");
249 + e.printStackTrace();
250 + } finally {
251 + releaseDecoder();
252 + }
253 + }
254 + }
255 +
256 + if (!decoded) throw new RuntimeException("Failed to decode NALs from the encoder.");
257 +
258 + // Compares the image before and after
259 + if (!compareLumaPanes()) {
260 + // TODO: try again with a different stride
261 + // TODO: try again with the "stride" param
262 + throw new RuntimeException("It is likely that stride!=width");
263 + }
264 +
265 + int padding;
266 + if ((padding = checkPaddingNeeded())>0) {
267 + if (padding<4096) {
268 + if (VERBOSE) Log.d(TAG, "Some padding is needed: "+padding);
269 + mNV21.setYPadding(padding);
270 + createTestImage();
271 + mData = mNV21.convert(mInitialImage);
272 + encodeDecode();
273 + } else {
274 + // TODO: try again with a different sliceHeight
275 + // TODO: try again with the "slice-height" param
276 + throw new RuntimeException("It is likely that sliceHeight!=height");
277 + }
278 + }
279 +
280 + createTestImage();
281 + if (!compareChromaPanes(false)) {
282 + if (compareChromaPanes(true)) {
283 + mNV21.setColorPanesReversed(true);
284 + if (VERBOSE) Log.d(TAG, "U and V pane are reversed");
285 + } else {
286 + throw new RuntimeException("Incorrect U or V pane...");
287 + }
288 + }
289 +
290 + saveTestResult(true);
291 + Log.v(TAG, "The encoder "+mEncoderName+" is usable with resolution "+mWidth+"x"+mHeight);
292 + return;
293 +
294 + } catch (Exception e) {
295 + StringWriter sw = new StringWriter();
296 + PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw);
297 + String stack = sw.toString();
298 + String str = "Encoder "+mEncoderName+" cannot be used with color format "+mEncoderColorFormat;
299 + if (VERBOSE) Log.e(TAG, str, e);
300 + mErrorLog += str + "\n" + stack;
301 + e.printStackTrace();
302 + } finally {
303 + releaseEncoder();
304 + }
305 +
306 + }
307 + }
308 +
309 + saveTestResult(false);
310 + Log.e(TAG,"No usable encoder were found on the phone for resolution "+mWidth+"x"+mHeight);
311 + throw new RuntimeException("No usable encoder were found on the phone for resolution "+mWidth+"x"+mHeight);
312 +
313 + }
314 +
315 + private boolean checkTestNeeded() {
316 + String resolution = mWidth+"x"+mHeight+"-";
317 +
318 + // Forces the test
319 + if (DEBUG || mPreferences==null) return true;
320 +
321 + // If the sdk has changed on the phone, or the version of the test
322 + // it has to be run again
323 + if (mPreferences.contains(PREF_PREFIX+resolution+"lastSdk")) {
324 + int lastSdk = mPreferences.getInt(PREF_PREFIX+resolution+"lastSdk", 0);
325 + int lastVersion = mPreferences.getInt(PREF_PREFIX+resolution+"lastVersion", 0);
326 + if (Build.VERSION.SDK_INT>lastSdk || VERSION>lastVersion) {
327 + return true;
328 + }
329 + } else {
330 + return true;
331 + }
332 + return false;
333 + }
334 +
335 +
336 + /**
337 + * Saves the result of the test in the shared preferences,
338 + * we will run it again only if the SDK has changed on the phone,
339 + * or if this test has been modified.
340 + */
341 + private void saveTestResult(boolean success) {
342 + String resolution = mWidth+"x"+mHeight+"-";
343 + Editor editor = mPreferences.edit();
344 +
345 + editor.putBoolean(PREF_PREFIX+resolution+"success", success);
346 +
347 + if (success) {
348 + editor.putInt(PREF_PREFIX+resolution+"lastSdk", Build.VERSION.SDK_INT);
349 + editor.putInt(PREF_PREFIX+resolution+"lastVersion", VERSION);
350 + editor.putInt(PREF_PREFIX+resolution+"sliceHeight", mNV21.getSliceHeigth());
351 + editor.putInt(PREF_PREFIX+resolution+"stride", mNV21.getStride());
352 + editor.putInt(PREF_PREFIX+resolution+"padding", mNV21.getYPadding());
353 + editor.putBoolean(PREF_PREFIX+resolution+"planar", mNV21.getPlanar());
354 + editor.putBoolean(PREF_PREFIX+resolution+"reversed", mNV21.getUVPanesReversed());
355 + editor.putString(PREF_PREFIX+resolution+"encoderName", mEncoderName);
356 + editor.putInt(PREF_PREFIX+resolution+"colorFormat", mEncoderColorFormat);
357 + editor.putString(PREF_PREFIX+resolution+"encoderName", mEncoderName);
358 + editor.putString(PREF_PREFIX+resolution+"pps", mB64PPS);
359 + editor.putString(PREF_PREFIX+resolution+"sps", mB64SPS);
360 + }
361 +
362 + editor.commit();
363 + }
364 +
365 + /**
366 + * Creates the test image that will be used to feed the encoder.
367 + */
368 + private void createTestImage() {
369 + mInitialImage = new byte[3*mSize/2];
370 + for (int i=0;i<mSize;i++) {
371 + mInitialImage[i] = (byte) (40+i%199);
372 + }
373 + for (int i=mSize;i<3*mSize/2;i+=2) {
374 + mInitialImage[i] = (byte) (40+i%200);
375 + mInitialImage[i+1] = (byte) (40+(i+99)%200);
376 + }
377 +
378 + }
379 +
380 + /**
381 + * Compares the Y pane of the initial image, and the Y pane
382 + * after having encoded & decoded the image.
383 + */
384 + private boolean compareLumaPanes() {
385 + int d, e, f = 0;
386 + for (int j=0;j<NB_DECODED;j++) {
387 + for (int i=0;i<mSize;i+=10) {
388 + d = (mInitialImage[i]&0xFF) - (mDecodedVideo[j][i]&0xFF);
389 + e = (mInitialImage[i+1]&0xFF) - (mDecodedVideo[j][i+1]&0xFF);
390 + d = d<0 ? -d : d;
391 + e = e<0 ? -e : e;
392 + if (d>50 && e>50) {
393 + mDecodedVideo[j] = null;
394 + f++;
395 + break;
396 + }
397 + }
398 + }
399 + return f<=NB_DECODED/2;
400 + }
401 +
402 + private int checkPaddingNeeded() {
403 + int i = 0, j = 3*mSize/2-1, max = 0;
404 + int[] r = new int[NB_DECODED];
405 + for (int k=0;k<NB_DECODED;k++) {
406 + if (mDecodedVideo[k] != null) {
407 + i = 0;
408 + while (i<j && (mDecodedVideo[k][j-i]&0xFF)<50) i+=2;
409 + if (i>0) {
410 + r[k] = ((i>>6)<<6);
411 + max = r[k]>max ? r[k] : max;
412 + if (VERBOSE) Log.e(TAG,"Padding needed: "+r[k]);
413 + } else {
414 + if (VERBOSE) Log.v(TAG,"No padding needed.");
415 + }
416 + }
417 + }
418 +
419 + return ((max>>6)<<6);
420 + }
421 +
422 + /**
423 + * Compares the U or V pane of the initial image, and the U or V pane
424 + * after having encoded & decoded the image.
425 + */
426 + private boolean compareChromaPanes(boolean crossed) {
427 + int d, f = 0;
428 +
429 + for (int j=0;j<NB_DECODED;j++) {
430 + if (mDecodedVideo[j] != null) {
431 + // We compare the U and V pane before and after
432 + if (!crossed) {
433 + for (int i=mSize;i<3*mSize/2;i+=1) {
434 + d = (mInitialImage[i]&0xFF) - (mDecodedVideo[j][i]&0xFF);
435 + d = d<0 ? -d : d;
436 + if (d>50) {
437 + //if (VERBOSE) Log.e(TAG,"BUG "+(i-mSize)+" d "+d);
438 + f++;
439 + break;
440 + }
441 + }
442 +
443 + // We compare the V pane before with the U pane after
444 + } else {
445 + for (int i=mSize;i<3*mSize/2;i+=2) {
446 + d = (mInitialImage[i]&0xFF) - (mDecodedVideo[j][i+1]&0xFF);
447 + d = d<0 ? -d : d;
448 + if (d>50) {
449 + f++;
450 + }
451 + }
452 + }
453 + }
454 + }
455 + return f<=NB_DECODED/2;
456 + }
457 +
458 + /**
459 + * Converts the image obtained from the decoder to NV21.
460 + */
461 + private void convertToNV21(int k) {
462 + byte[] buffer = new byte[3*mSize/2];
463 +
464 + int stride = mWidth, sliceHeight = mHeight;
465 + int colorFormat = mDecoderColorFormat;
466 + boolean planar = false;
467 +
468 + if (mDecOutputFormat != null) {
469 + MediaFormat format = mDecOutputFormat;
470 + if (format != null) {
471 + if (format.containsKey("slice-height")) {
472 + sliceHeight = format.getInteger("slice-height");
473 + if (sliceHeight<mHeight) sliceHeight = mHeight;
474 + }
475 + if (format.containsKey("stride")) {
476 + stride = format.getInteger("stride");
477 + if (stride<mWidth) stride = mWidth;
478 + }
479 + if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT) && format.getInteger(MediaFormat.KEY_COLOR_FORMAT)>0) {
480 + colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
481 + }
482 + }
483 + }
484 +
485 + switch (colorFormat) {
486 + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
487 + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
488 + case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
489 + planar = false;
490 + break;
491 + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
492 + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
493 + planar = true;
494 + break;
495 + }
496 +
497 + for (int i=0;i<mSize;i++) {
498 + if (i%mWidth==0) i+=stride-mWidth;
499 + buffer[i] = mDecodedVideo[k][i];
500 + }
501 +
502 + if (!planar) {
503 + for (int i=0,j=0;j<mSize/4;i+=1,j+=1) {
504 + if (i%mWidth/2==0) i+=(stride-mWidth)/2;
505 + buffer[mSize+2*j+1] = mDecodedVideo[k][stride*sliceHeight+2*i];
506 + buffer[mSize+2*j] = mDecodedVideo[k][stride*sliceHeight+2*i+1];
507 + }
508 + } else {
509 + for (int i=0,j=0;j<mSize/4;i+=1,j+=1) {
510 + if (i%mWidth/2==0) i+=(stride-mWidth)/2;
511 + buffer[mSize+2*j+1] = mDecodedVideo[k][stride*sliceHeight+i];
512 + buffer[mSize+2*j] = mDecodedVideo[k][stride*sliceHeight*5/4+i];
513 + }
514 + }
515 +
516 + mDecodedVideo[k] = buffer;
517 +
518 + }
519 +
520 + /**
521 + * Instantiates and starts the encoder.
522 + * @throws IOException The encoder cannot be configured
523 + */
524 + private void configureEncoder() throws IOException {
525 + mEncoder = MediaCodec.createByCodecName(mEncoderName);
526 + MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
527 + mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE);
528 + mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMERATE);
529 + mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mEncoderColorFormat);
530 + mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
531 + mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
532 + mEncoder.start();
533 + }
534 +
535 + private void releaseEncoder() {
536 + if (mEncoder != null) {
537 + try {
538 + mEncoder.stop();
539 + } catch (Exception ignore) {}
540 + try {
541 + mEncoder.release();
542 + } catch (Exception ignore) {}
543 + }
544 + }
545 +
546 + /**
547 + * Instantiates and starts the decoder.
548 + * @throws IOException The decoder cannot be configured
549 + */
550 + private void configureDecoder() throws IOException {
551 + byte[] prefix = new byte[] {0x00,0x00,0x00,0x01};
552 +
553 + ByteBuffer csd0 = ByteBuffer.allocate(4+mSPS.length+4+mPPS.length);
554 + csd0.put(new byte[] {0x00,0x00,0x00,0x01});
555 + csd0.put(mSPS);
556 + csd0.put(new byte[] {0x00,0x00,0x00,0x01});
557 + csd0.put(mPPS);
558 +
559 + mDecoder = MediaCodec.createByCodecName(mDecoderName);
560 + MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
561 + mediaFormat.setByteBuffer("csd-0", csd0);
562 + mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDecoderColorFormat);
563 + mDecoder.configure(mediaFormat, null, null, 0);
564 + mDecoder.start();
565 +
566 + ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
567 +
568 + int decInputIndex = mDecoder.dequeueInputBuffer(1000000/FRAMERATE);
569 + if (decInputIndex>=0) {
570 + decInputBuffers[decInputIndex].clear();
571 + decInputBuffers[decInputIndex].put(prefix);
572 + decInputBuffers[decInputIndex].put(mSPS);
573 + mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
574 + } else {
575 + if (VERBOSE) Log.e(TAG,"No buffer available !");
576 + }
577 +
578 + decInputIndex = mDecoder.dequeueInputBuffer(1000000/FRAMERATE);
579 + if (decInputIndex>=0) {
580 + decInputBuffers[decInputIndex].clear();
581 + decInputBuffers[decInputIndex].put(prefix);
582 + decInputBuffers[decInputIndex].put(mPPS);
583 + mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
584 + } else {
585 + if (VERBOSE) Log.e(TAG,"No buffer available !");
586 + }
587 +
588 +
589 + }
590 +
591 + private void releaseDecoder() {
592 + if (mDecoder != null) {
593 + try {
594 + mDecoder.stop();
595 + } catch (Exception ignore) {}
596 + try {
597 + mDecoder.release();
598 + } catch (Exception ignore) {}
599 + }
600 + }
601 +
602 + /**
603 + * Tries to obtain the SPS and the PPS for the encoder.
604 + */
605 + private long searchSPSandPPS() {
606 +
607 + ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
608 + ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
609 + BufferInfo info = new BufferInfo();
610 + byte[] csd = new byte[128];
611 + int len = 0, p = 4, q = 4;
612 + long elapsed = 0, now = timestamp();
613 +
614 + while (elapsed<3000000 && (mSPS==null || mPPS==null)) {
615 +
616 + // Some encoders won't give us the SPS and PPS unless they receive something to encode first...
617 + int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
618 + if (bufferIndex>=0) {
619 + check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
620 + inputBuffers[bufferIndex].clear();
621 + inputBuffers[bufferIndex].put(mData, 0, mData.length);
622 + mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
623 + } else {
624 + if (VERBOSE) Log.e(TAG,"No buffer available !");
625 + }
626 +
627 + // We are looking for the SPS and the PPS here. As always, Android is very inconsistent, I have observed that some
628 + // encoders will give those parameters through the MediaFormat object (that is the normal behaviour).
629 + // But some other will not, in that case we try to find a NAL unit of type 7 or 8 in the byte stream outputed by the encoder...
630 +
631 + int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
632 +
633 + if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
634 +
635 + // The PPS and PPS shoud be there
636 + MediaFormat format = mEncoder.getOutputFormat();
637 + ByteBuffer spsb = format.getByteBuffer("csd-0");
638 + ByteBuffer ppsb = format.getByteBuffer("csd-1");
639 + mSPS = new byte[spsb.capacity()-4];
640 + spsb.position(4);
641 + spsb.get(mSPS,0,mSPS.length);
642 + mPPS = new byte[ppsb.capacity()-4];
643 + ppsb.position(4);
644 + ppsb.get(mPPS,0,mPPS.length);
645 + break;
646 +
647 + } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
648 + outputBuffers = mEncoder.getOutputBuffers();
649 + } else if (index>=0) {
650 +
651 + len = info.size;
652 + if (len<128) {
653 + outputBuffers[index].get(csd,0,len);
654 + if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
655 + // Parses the SPS and PPS, they could be in two different packets and in a different order
656 + //depending on the phone so we don't make any assumption about that
657 + while (p<len) {
658 + while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
659 + if (p+3>=len) p=len;
660 + if ((csd[q]&0x1F)==7) {
661 + mSPS = new byte[p-q];
662 + System.arraycopy(csd, q, mSPS, 0, p-q);
663 + } else {
664 + mPPS = new byte[p-q];
665 + System.arraycopy(csd, q, mPPS, 0, p-q);
666 + }
667 + p += 4;
668 + q = p;
669 + }
670 + }
671 + }
672 + mEncoder.releaseOutputBuffer(index, false);
673 + }
674 +
675 + elapsed = timestamp() - now;
676 + }
677 +
678 + check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
679 + mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
680 + mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);
681 +
682 + return elapsed;
683 + }
684 +
685 + private long encode() {
686 + int n = 0;
687 + long elapsed = 0, now = timestamp();
688 + int encOutputIndex = 0, encInputIndex = 0;
689 + BufferInfo info = new BufferInfo();
690 + ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
691 + ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();
692 +
693 + while (elapsed<5000000) {
694 + // Feeds the encoder with an image
695 + encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
696 + if (encInputIndex>=0) {
697 + check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
698 + encInputBuffers[encInputIndex].clear();
699 + encInputBuffers[encInputIndex].put(mData, 0, mData.length);
700 + mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
701 + } else {
702 + if (VERBOSE) Log.d(TAG,"No buffer available !");
703 + }
704 +
705 + // Tries to get a NAL unit
706 + encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
707 + if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
708 + encOutputBuffers = mEncoder.getOutputBuffers();
709 + } else if (encOutputIndex>=0) {
710 + mVideo[n] = new byte[info.size];
711 + encOutputBuffers[encOutputIndex].clear();
712 + encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
713 + mEncoder.releaseOutputBuffer(encOutputIndex, false);
714 + if (n>=NB_ENCODED) {
715 + flushMediaCodec(mEncoder);
716 + return elapsed;
717 + }
718 + }
719 +
720 + elapsed = timestamp() - now;
721 + }
722 +
723 + throw new RuntimeException("The encoder is too slow.");
724 +
725 + }
726 +
727 + /**
728 + * @param withPrefix If set to true, the decoder will be fed with NALs preceeded with 0x00000001.
729 + * @return How long it took to decode all the NALs
730 + */
731 + private long decode(boolean withPrefix) {
732 + int n = 0, i = 0, j = 0;
733 + long elapsed = 0, now = timestamp();
734 + int decInputIndex = 0, decOutputIndex = 0;
735 + ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
736 + ByteBuffer[] decOutputBuffers = mDecoder.getOutputBuffers();
737 + BufferInfo info = new BufferInfo();
738 +
739 + while (elapsed<3000000) {
740 +
741 + // Feeds the decoder with a NAL unit
742 + if (i<NB_ENCODED) {
743 + decInputIndex = mDecoder.dequeueInputBuffer(1000000/FRAMERATE);
744 + if (decInputIndex>=0) {
745 + int l1 = decInputBuffers[decInputIndex].capacity();
746 + int l2 = mVideo[i].length;
747 + decInputBuffers[decInputIndex].clear();
748 +
749 + if ((withPrefix && hasPrefix(mVideo[i])) || (!withPrefix && !hasPrefix(mVideo[i]))) {
750 + check(l1>=l2, "The decoder input buffer is not big enough (nal="+l2+", capacity="+l1+").");
751 + decInputBuffers[decInputIndex].put(mVideo[i],0,mVideo[i].length);
752 + } else if (withPrefix && !hasPrefix(mVideo[i])) {
753 + check(l1>=l2+4, "The decoder input buffer is not big enough (nal="+(l2+4)+", capacity="+l1+").");
754 + decInputBuffers[decInputIndex].put(new byte[] {0,0,0,1});
755 + decInputBuffers[decInputIndex].put(mVideo[i],0,mVideo[i].length);
756 + } else if (!withPrefix && hasPrefix(mVideo[i])) {
757 + check(l1>=l2-4, "The decoder input buffer is not big enough (nal="+(l2-4)+", capacity="+l1+").");
758 + decInputBuffers[decInputIndex].put(mVideo[i],4,mVideo[i].length-4);
759 + }
760 +
761 + mDecoder.queueInputBuffer(decInputIndex, 0, l2, timestamp(), 0);
762 + i++;
763 + } else {
764 + if (VERBOSE) Log.d(TAG,"No buffer available !");
765 + }
766 + }
767 +
768 + // Tries to get a decoded image
769 + decOutputIndex = mDecoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
770 + if (decOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
771 + decOutputBuffers = mDecoder.getOutputBuffers();
772 + } else if (decOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
773 + mDecOutputFormat = mDecoder.getOutputFormat();
774 + } else if (decOutputIndex>=0) {
775 + if (n>2) {
776 + // We have successfully encoded and decoded an image !
777 + int length = info.size;
778 + mDecodedVideo[j] = new byte[length];
779 + decOutputBuffers[decOutputIndex].clear();
780 + decOutputBuffers[decOutputIndex].get(mDecodedVideo[j], 0, length);
781 + // Converts the decoded frame to NV21
782 + convertToNV21(j);
783 + if (j>=NB_DECODED-1) {
784 + flushMediaCodec(mDecoder);
785 + if (VERBOSE) Log.v(TAG, "Decoding "+n+" frames took "+elapsed/1000+" ms");
786 + return elapsed;
787 + }
788 + j++;
789 + }
790 + mDecoder.releaseOutputBuffer(decOutputIndex, false);
791 + n++;
792 + }
793 + elapsed = timestamp() - now;
794 + }
795 +
796 + throw new RuntimeException("The decoder did not decode anything.");
797 +
798 + }
799 +
800 + /**
801 + * Makes sure the NAL has a header or not.
802 + * @param withPrefix If set to true, the NAL will be preceded with 0x00000001.
803 + */
804 + private boolean hasPrefix(byte[] nal) {
805 + return nal[0] == 0 && nal[1] == 0 && nal[2] == 0 && nal[3] == 0x01;
806 + }
807 +
808 + /**
809 + * @throws IOException The decoder cannot be configured.
810 + */
811 + private void encodeDecode() throws IOException {
812 + encode();
813 + try {
814 + configureDecoder();
815 + decode(true);
816 + } finally {
817 + releaseDecoder();
818 + }
819 + }
820 +
821 + private void flushMediaCodec(MediaCodec mc) {
822 + int index = 0;
823 + BufferInfo info = new BufferInfo();
824 + while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
825 + index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
826 + if (index>=0) {
827 + mc.releaseOutputBuffer(index, false);
828 + }
829 + }
830 + }
831 +
832 + private void check(boolean cond, String message) {
833 + if (!cond) {
834 + if (VERBOSE) Log.e(TAG,message);
835 + throw new IllegalStateException(message);
836 + }
837 + }
838 +
839 + private long timestamp() {
840 + return System.nanoTime()/1000;
841 + }
842 +
843 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.hw;
20 +
21 +import java.nio.ByteBuffer;
22 +import android.media.MediaCodecInfo;
23 +import android.util.Log;
24 +
25 +/**
26 + * Converts from NV21 to YUV420 semi planar or planar.
27 + */
28 +public class NV21Convertor {
29 +
30 + private int mSliceHeight, mHeight;
31 + private int mStride, mWidth;
32 + private int mSize;
33 + private boolean mPlanar, mPanesReversed = false;
34 + private int mYPadding;
35 + private byte[] mBuffer;
36 + ByteBuffer mCopy;
37 +
38 + public void setSize(int width, int height) {
39 + mHeight = height;
40 + mWidth = width;
41 + mSliceHeight = height;
42 + mStride = width;
43 + mSize = mWidth*mHeight;
44 + }
45 +
46 + public void setStride(int width) {
47 + mStride = width;
48 + }
49 +
50 + public void setSliceHeigth(int height) {
51 + mSliceHeight = height;
52 + }
53 +
54 + public void setPlanar(boolean planar) {
55 + mPlanar = planar;
56 + }
57 +
58 + public void setYPadding(int padding) {
59 + mYPadding = padding;
60 + }
61 +
62 + public int getBufferSize() {
63 + return 3*mSize/2;
64 + }
65 +
66 + public void setEncoderColorFormat(int colorFormat) {
67 + switch (colorFormat) {
68 + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
69 + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
70 + case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
71 + setPlanar(false);
72 + break;
73 + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
74 + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
75 + setPlanar(true);
76 + break;
77 + }
78 + }
79 +
80 + public void setColorPanesReversed(boolean b) {
81 + mPanesReversed = b;
82 + }
83 +
84 + public int getStride() {
85 + return mStride;
86 + }
87 +
88 + public int getSliceHeigth() {
89 + return mSliceHeight;
90 + }
91 +
92 + public int getYPadding() {
93 + return mYPadding;
94 + }
95 +
96 +
97 + public boolean getPlanar() {
98 + return mPlanar;
99 + }
100 +
101 + public boolean getUVPanesReversed() {
102 + return mPanesReversed;
103 + }
104 +
105 + public void convert(byte[] data, ByteBuffer buffer) {
106 + byte[] result = convert(data);
107 + int min = buffer.capacity() < data.length?buffer.capacity() : data.length;
108 + buffer.put(result, 0, min);
109 + }
110 +
111 + public byte[] convert(byte[] data) {
112 +
113 + // A buffer large enough for every case
114 + if (mBuffer==null || mBuffer.length != 3*mSliceHeight*mStride/2+mYPadding) {
115 + mBuffer = new byte[3*mSliceHeight*mStride/2+mYPadding];
116 + }
117 +
118 + if (!mPlanar) {
119 + if (mSliceHeight==mHeight && mStride==mWidth) {
120 + // Swaps U and V
121 + if (!mPanesReversed) {
122 + for (int i = mSize; i < mSize+mSize/2; i += 2) {
123 + mBuffer[0] = data[i+1];
124 + data[i+1] = data[i];
125 + data[i] = mBuffer[0];
126 + }
127 + }
128 + if (mYPadding>0) {
129 + System.arraycopy(data, 0, mBuffer, 0, mSize);
130 + System.arraycopy(data, mSize, mBuffer, mSize+mYPadding, mSize/2);
131 + return mBuffer;
132 + }
133 + return data;
134 + }
135 + } else {
136 + if (mSliceHeight==mHeight && mStride==mWidth) {
137 + // De-interleave U and V
138 + if (!mPanesReversed) {
139 + for (int i = 0; i < mSize/4; i+=1) {
140 + mBuffer[i] = data[mSize+2*i+1];
141 + mBuffer[mSize/4+i] = data[mSize+2*i];
142 + }
143 + } else {
144 + for (int i = 0; i < mSize/4; i+=1) {
145 + mBuffer[i] = data[mSize+2*i];
146 + mBuffer[mSize/4+i] = data[mSize+2*i+1];
147 + }
148 + }
149 + if (mYPadding == 0) {
150 + System.arraycopy(mBuffer, 0, data, mSize, mSize/2);
151 + } else {
152 + System.arraycopy(data, 0, mBuffer, 0, mSize);
153 + System.arraycopy(mBuffer, 0, mBuffer, mSize+mYPadding, mSize/2);
154 + return mBuffer;
155 + }
156 + return data;
157 + }
158 + }
159 +
160 + return data;
161 + }
162 +
163 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.mp4;
20 +
21 +import java.io.FileNotFoundException;
22 +import java.io.IOException;
23 +import android.util.Base64;
24 +import android.util.Log;
25 +
26 +/**
27 + * Finds SPS & PPS parameters in mp4 file.
28 + */
29 +public class MP4Config {
30 +
31 + public final static String TAG = "MP4Config";
32 +
33 + private MP4Parser mp4Parser;
34 + private String mProfilLevel, mPPS, mSPS;
35 +
36 + public MP4Config(String profil, String sps, String pps) {
37 + mProfilLevel = profil;
38 + mPPS = pps;
39 + mSPS = sps;
40 + }
41 +
42 + public MP4Config(String sps, String pps) {
43 + mPPS = pps;
44 + mSPS = sps;
45 + mProfilLevel = MP4Parser.toHexString(Base64.decode(sps, Base64.NO_WRAP),1,3);
46 + }
47 +
48 + public MP4Config(byte[] sps, byte[] pps) {
49 + mPPS = Base64.encodeToString(pps, 0, pps.length, Base64.NO_WRAP);
50 + mSPS = Base64.encodeToString(sps, 0, sps.length, Base64.NO_WRAP);
51 + mProfilLevel = MP4Parser.toHexString(sps,1,3);
52 + }
53 +
54 + /**
55 + * Finds SPS & PPS parameters inside a .mp4.
56 + * @param path Path to the file to analyze
57 + * @throws IOException
58 + * @throws FileNotFoundException
59 + */
60 + public MP4Config (String path) throws IOException, FileNotFoundException {
61 +
62 + StsdBox stsdBox;
63 +
64 + // We open the mp4 file and parse it
65 + try {
66 + mp4Parser = MP4Parser.parse(path);
67 + } catch (IOException ignore) {
68 + // Maybe enough of the file has been parsed and we can get the stsd box
69 + }
70 +
71 + // We find the stsdBox
72 + stsdBox = mp4Parser.getStsdBox();
73 + mPPS = stsdBox.getB64PPS();
74 + mSPS = stsdBox.getB64SPS();
75 + mProfilLevel = stsdBox.getProfileLevel();
76 +
77 + mp4Parser.close();
78 +
79 + }
80 +
81 + public String getProfileLevel() {
82 + return mProfilLevel;
83 + }
84 +
85 + public String getB64PPS() {
86 + Log.d(TAG, "PPS: "+mPPS);
87 + return mPPS;
88 + }
89 +
90 + public String getB64SPS() {
91 + Log.d(TAG, "SPS: "+mSPS);
92 + return mSPS;
93 + }
94 +
95 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.mp4;
20 +
21 +import java.io.File;
22 +import java.io.FileNotFoundException;
23 +import java.io.IOException;
24 +import java.io.RandomAccessFile;
25 +import java.nio.ByteBuffer;
26 +import java.util.HashMap;
27 +import android.util.Base64;
28 +import android.util.Log;
29 +
30 +/**
31 + * Parse an mp4 file.
32 + * An mp4 file contains a tree where each node has a name and a size.
33 + * This class is used by H264Stream.java to determine the SPS and PPS parameters of a short video recorded by the phone.
34 + */
35 +public class MP4Parser {
36 +
37 + private static final String TAG = "MP4Parser";
38 +
39 + private HashMap<String, Long> mBoxes = new HashMap<>();
40 + private final RandomAccessFile mFile;
41 + private long mPos = 0;
42 +
43 +
44 + /** Parses the mp4 file. **/
45 + public static MP4Parser parse(String path) throws IOException {
46 + return new MP4Parser(path);
47 + }
48 +
49 + private MP4Parser(final String path) throws IOException, FileNotFoundException {
50 + mFile = new RandomAccessFile(new File(path), "r");
51 + try {
52 + parse("",mFile.length());
53 + } catch (Exception e) {
54 + e.printStackTrace();
55 + throw new IOException("Parse error: malformed mp4 file");
56 + }
57 + }
58 +
59 + public void close() {
60 + try {
61 + mFile.close();
62 + } catch (Exception e) {};
63 + }
64 +
65 + public long getBoxPos(String box) throws IOException {
66 + Long r = mBoxes.get(box);
67 +
68 + if (r==null) throw new IOException("Box not found: "+box);
69 + return mBoxes.get(box);
70 + }
71 +
72 + public StsdBox getStsdBox() throws IOException {
73 + try {
74 + return new StsdBox(mFile,getBoxPos("/moov/trak/mdia/minf/stbl/stsd"));
75 + } catch (IOException e) {
76 + throw new IOException("stsd box could not be found");
77 + }
78 + }
79 +
80 + private void parse(String path, long len) throws IOException {
81 + ByteBuffer byteBuffer;
82 + long sum = 0, newlen = 0;
83 + byte[] buffer = new byte[8];
84 + String name = "";
85 +
86 + if(!path.equals("")) mBoxes.put(path, mPos-8);
87 +
88 + while (sum<len) {
89 + mFile.read(buffer,0,8);
90 + mPos += 8; sum += 8;
91 +
92 + if (validBoxName(buffer)) {
93 + name = new String(buffer,4,4);
94 +
95 + if (buffer[3] == 1) {
96 + // 64 bits atom size
97 + mFile.read(buffer,0,8);
98 + mPos += 8; sum += 8;
99 + byteBuffer = ByteBuffer.wrap(buffer,0,8);
100 + newlen = byteBuffer.getLong()-16;
101 + } else {
102 + // 32 bits atom size
103 + byteBuffer = ByteBuffer.wrap(buffer,0,4);
104 + newlen = byteBuffer.getInt()-8;
105 + }
106 +
107 + // 1061109559+8 correspond to "????" in ASCII the HTC Desire S seems to write that sometimes, maybe other phones do
108 + // "wide" atom would produce a newlen == 0, and we shouldn't throw an exception because of that
109 + if (newlen < 0 || newlen == 1061109559) throw new IOException();
110 +
111 + Log.d(TAG, "Atom -> name: "+name+" position: "+mPos+", length: "+newlen);
112 + sum += newlen;
113 + parse(path+'/'+name,newlen);
114 +
115 + }
116 + else {
117 + if( len < 8){
118 + mFile.seek(mFile.getFilePointer() - 8 + len);
119 + sum += len-8;
120 + } else {
121 + int skipped = mFile.skipBytes((int)(len-8));
122 + if (skipped < ((int)(len-8))) {
123 + throw new IOException();
124 + }
125 + mPos += len-8;
126 + sum += len-8;
127 + }
128 + }
129 + }
130 + }
131 +
132 + private boolean validBoxName(byte[] buffer) {
133 + for (int i=0;i<4;i++) {
134 + // If the next 4 bytes are neither lowercase letters nor numbers
135 + if ((buffer[i+4]< 'a' || buffer[i+4]>'z') && (buffer[i+4]<'0'|| buffer[i+4]>'9') ) return false;
136 + }
137 + return true;
138 + }
139 +
140 + static String toHexString(byte[] buffer,int start, int len) {
141 + String c;
142 + StringBuilder s = new StringBuilder();
143 + for (int i=start;i<start+len;i++) {
144 + c = Integer.toHexString(buffer[i]&0xFF);
145 + s.append( c.length()<2 ? "0"+c : c );
146 + }
147 + return s.toString();
148 + }
149 +
150 +}
151 +
152 +class StsdBox {
153 +
154 + private RandomAccessFile fis;
155 + private byte[] buffer = new byte[4];
156 + private long pos = 0;
157 +
158 + private byte[] pps;
159 + private byte[] sps;
160 + private int spsLength, ppsLength;
161 +
162 + /** Parse the sdsd box in an mp4 file
163 + * fis: proper mp4 file
164 + * pos: stsd box's position in the file
165 + */
166 + public StsdBox (RandomAccessFile fis, long pos) {
167 +
168 + this.fis = fis;
169 + this.pos = pos;
170 +
171 + findBoxAvcc();
172 + findSPSandPPS();
173 +
174 + }
175 +
176 + public String getProfileLevel() {
177 + return MP4Parser.toHexString(sps,1,3);
178 + }
179 +
180 + public String getB64PPS() {
181 + return Base64.encodeToString(pps, 0, ppsLength, Base64.NO_WRAP);
182 + }
183 +
184 + public String getB64SPS() {
185 + return Base64.encodeToString(sps, 0, spsLength, Base64.NO_WRAP);
186 + }
187 +
188 + private boolean findSPSandPPS() {
189 + /*
190 + * SPS and PPS parameters are stored in the avcC box
191 + * You may find really useful information about this box
192 + * in the document ISO-IEC 14496-15, part 5.2.4.1.1
193 + * The box's structure is described there
194 + * <pre>
195 + * aligned(8) class AVCDecoderConfigurationRecord {
196 + * unsigned int(8) configurationVersion = 1;
197 + * unsigned int(8) AVCProfileIndication;
198 + * unsigned int(8) profile_compatibility;
199 + * unsigned int(8) AVCLevelIndication;
200 + * bit(6) reserved = ‘111111’b;
201 + * unsigned int(2) lengthSizeMinusOne;
202 + * bit(3) reserved = ‘111’b;
203 + * unsigned int(5) numOfSequenceParameterSets;
204 + * for (i=0; i< numOfSequenceParameterSets; i++) {
205 + * unsigned int(16) sequenceParameterSetLength ;
206 + * bit(8*sequenceParameterSetLength) sequenceParameterSetNALUnit;
207 + * }
208 + * unsigned int(8) numOfPictureParameterSets;
209 + * for (i=0; i< numOfPictureParameterSets; i++) {
210 + * unsigned int(16) pictureParameterSetLength;
211 + * bit(8*pictureParameterSetLength) pictureParameterSetNALUnit;
212 + * }
213 + * }
214 + * </pre>
215 + */
216 + try {
217 +
218 + // TODO: Here we assume that numOfSequenceParameterSets = 1, numOfPictureParameterSets = 1 !
219 + // Here we extract the SPS parameter
220 + fis.skipBytes(7);
221 + spsLength = 0xFF&fis.readByte();
222 + sps = new byte[spsLength];
223 + fis.read(sps,0,spsLength);
224 + // Here we extract the PPS parameter
225 + fis.skipBytes(2);
226 + ppsLength = 0xFF&fis.readByte();
227 + pps = new byte[ppsLength];
228 + fis.read(pps,0,ppsLength);
229 +
230 + } catch (IOException e) {
231 + return false;
232 + }
233 +
234 + return true;
235 + }
236 +
237 + private boolean findBoxAvcc() {
238 + try {
239 + fis.seek(pos+8);
240 + while (true) {
241 + while (fis.read() != 'a');
242 + fis.read(buffer,0,3);
243 + if (buffer[0] == 'v' && buffer[1] == 'c' && buffer[2] == 'C') break;
244 + }
245 + } catch (IOException e) {
246 + return false;
247 + }
248 + return true;
249 +
250 + }
251 +
252 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtcp;
20 +
21 +import static net.majorkernelpanic.streaming.rtp.RtpSocket.TRANSPORT_TCP;
22 +import static net.majorkernelpanic.streaming.rtp.RtpSocket.TRANSPORT_UDP;
23 +import java.io.IOException;
24 +import java.io.OutputStream;
25 +import java.net.DatagramPacket;
26 +import java.net.InetAddress;
27 +import java.net.MulticastSocket;
28 +import java.nio.channels.IllegalSelectorException;
29 +import android.os.SystemClock;
30 +import android.util.Log;
31 +
32 +/**
33 + * Implementation of Sender Report RTCP packets.
34 + */
35 +public class SenderReport {
36 +
37 + public static final int MTU = 1500;
38 +
39 + private static final int PACKET_LENGTH = 28;
40 +
41 + private MulticastSocket usock;
42 + private DatagramPacket upack;
43 +
44 + private int mTransport;
45 + private OutputStream mOutputStream = null;
46 + private byte[] mBuffer = new byte[MTU];
47 + private int mSSRC, mPort = -1;
48 + private int mOctetCount = 0, mPacketCount = 0;
49 + private long interval, delta, now, oldnow;
50 + private byte mTcpHeader[];
51 +
52 + public SenderReport(int ssrc) throws IOException {
53 + super();
54 + this.mSSRC = ssrc;
55 + }
56 +
57 + public SenderReport() {
58 +
59 + mTransport = TRANSPORT_UDP;
60 + mTcpHeader = new byte[] {'$',0,0,PACKET_LENGTH};
61 +
62 + /* Version(2) Padding(0) */
63 + /* ^ ^ PT = 0 */
64 + /* | | ^ */
65 + /* | -------- | */
66 + /* | |--------------------- */
67 + /* | || */
68 + /* | || */
69 + mBuffer[0] = (byte) Integer.parseInt("10000000",2);
70 +
71 + /* Packet Type PT */
72 + mBuffer[1] = (byte) 200;
73 +
74 + /* Byte 2,3 -> Length */
75 + setLong(PACKET_LENGTH/4-1, 2, 4);
76 +
77 + /* Byte 4,5,6,7 -> SSRC */
78 + /* Byte 8,9,10,11 -> NTP timestamp hb */
79 + /* Byte 12,13,14,15 -> NTP timestamp lb */
80 + /* Byte 16,17,18,19 -> RTP timestamp */
81 + /* Byte 20,21,22,23 -> packet count */
82 + /* Byte 24,25,26,27 -> octet count */
83 +
84 + try {
85 + usock = new MulticastSocket();
86 + } catch (IOException e) {
87 + // Very unlikely to happen. Means that all UDP ports are already being used
88 + throw new RuntimeException(e.getMessage());
89 + }
90 + upack = new DatagramPacket(mBuffer, 1);
91 +
92 + // By default we sent one report every 3 secconde
93 + interval = 3000;
94 +
95 + }
96 +
97 + public void close() {
98 + usock.close();
99 + }
100 +
101 + /**
102 + * Sets the temporal interval between two RTCP Sender Reports.
103 + * Default interval is set to 3 seconds.
104 + * Set 0 to disable RTCP.
105 + * @param interval The interval in milliseconds
106 + */
107 + public void setInterval(long interval) {
108 + this.interval = interval;
109 + }
110 +
111 + /**
112 + * Updates the number of packets sent, and the total amount of data sent.
113 + * @param length The length of the packet
114 + * @param rtpts
115 + * The RTP timestamp.
116 + * @throws IOException
117 + **/
118 + public void update(int length, long rtpts) throws IOException {
119 + mPacketCount += 1;
120 + mOctetCount += length;
121 + setLong(mPacketCount, 20, 24);
122 + setLong(mOctetCount, 24, 28);
123 +
124 + now = SystemClock.elapsedRealtime();
125 + delta += oldnow != 0 ? now-oldnow : 0;
126 + oldnow = now;
127 + if (interval>0 && delta>=interval) {
128 + // We send a Sender Report
129 + send(System.nanoTime(), rtpts);
130 + delta = 0;
131 + }
132 +
133 + }
134 +
135 + public void setSSRC(int ssrc) {
136 + this.mSSRC = ssrc;
137 + setLong(ssrc,4,8);
138 + mPacketCount = 0;
139 + mOctetCount = 0;
140 + setLong(mPacketCount, 20, 24);
141 + setLong(mOctetCount, 24, 28);
142 + }
143 +
144 + public void setDestination(InetAddress dest, int dport) {
145 + mTransport = TRANSPORT_UDP;
146 + mPort = dport;
147 + upack.setPort(dport);
148 + upack.setAddress(dest);
149 + }
150 +
151 + /**
152 + * If a TCP is used as the transport protocol for the RTP session,
153 + * the output stream to which RTP packets will be written to must
154 + * be specified with this method.
155 + */
156 + public void setOutputStream(OutputStream os, byte channelIdentifier) {
157 + mTransport = TRANSPORT_TCP;
158 + mOutputStream = os;
159 + mTcpHeader[1] = channelIdentifier;
160 + }
161 +
162 + public int getPort() {
163 + return mPort;
164 + }
165 +
166 + public int getLocalPort() {
167 + return usock.getLocalPort();
168 + }
169 +
170 + public int getSSRC() {
171 + return mSSRC;
172 + }
173 +
174 + /**
175 + * Resets the reports (total number of bytes sent, number of packets sent, etc.)
176 + */
177 + public void reset() {
178 + mPacketCount = 0;
179 + mOctetCount = 0;
180 + setLong(mPacketCount, 20, 24);
181 + setLong(mOctetCount, 24, 28);
182 + delta = now = oldnow = 0;
183 + }
184 +
185 + private void setLong(long n, int begin, int end) {
186 + for (end--; end >= begin; end--) {
187 + mBuffer[end] = (byte) (n % 256);
188 + n >>= 8;
189 + }
190 + }
191 +
192 + /**
193 + * Sends the RTCP packet over the network.
194 + *
195 + * @param ntpts
196 + * the NTP timestamp.
197 + * @param rtpts
198 + * the RTP timestamp.
199 + */
200 + private void send(long ntpts, long rtpts) throws IOException {
201 + long hb = ntpts/1000000000;
202 + long lb = ( ( ntpts - hb*1000000000 ) * 4294967296L )/1000000000;
203 + setLong(hb, 8, 12);
204 + setLong(lb, 12, 16);
205 + setLong(rtpts, 16, 20);
206 + if (mTransport == TRANSPORT_UDP) {
207 + upack.setLength(PACKET_LENGTH);
208 + usock.send(upack);
209 + } else {
210 + synchronized (mOutputStream) {
211 + try {
212 + mOutputStream.write(mTcpHeader);
213 + mOutputStream.write(mBuffer, 0, PACKET_LENGTH);
214 + } catch (Exception e) {}
215 + }
216 + }
217 + }
218 +
219 +
220 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtp;
20 +
21 +import java.io.IOException;
22 +import net.majorkernelpanic.streaming.audio.AACStream;
23 +import android.os.SystemClock;
24 +import android.util.Log;
25 +
26 +/**
27 + *
28 + * RFC 3640.
29 + *
30 + * This packetizer must be fed with an InputStream containing ADTS AAC.
31 + * AAC will basically be rewrapped in an RTP stream and sent over the network.
32 + * This packetizer only implements the aac-hbr mode (High Bit-rate AAC) and
33 + * each packet only carry a single and complete AAC access unit.
34 + *
35 + */
public class AACADTSPacketizer extends AbstractPacketizer implements Runnable {

	private final static String TAG = "AACADTSPacketizer";

	// Worker thread running the packetization loop; null when stopped.
	private Thread t;
	// Sampling rate in Hz; overwritten with the value parsed from each ADTS header.
	private int samplingRate = 8000;

	public AACADTSPacketizer() {
		super();
	}

	/** Starts the packetization thread (no-op if it is already running). */
	public void start() {
		if (t==null) {
			t = new Thread(this);
			t.start();
		}
	}

	/** Stops the thread: closing the input stream unblocks any pending read, then joins. */
	public void stop() {
		if (t != null) {
			try {
				is.close();
			} catch (IOException ignore) {}
			t.interrupt();
			try {
				t.join();
			} catch (InterruptedException e) {}
			t = null;
		}
	}

	/** Sets the sampling rate used as the RTP clock frequency. */
	public void setSamplingRate(int samplingRate) {
		this.samplingRate = samplingRate;
		socket.setClockFrequency(samplingRate);
	}

	/** Main loop: parses ADTS frames from the input stream and sends them as RFC 3640 (aac-hbr) RTP packets. */
	public void run() {

		Log.d(TAG,"AAC ADTS packetizer started !");

		// "A packet SHALL carry either one or more complete Access Units, or a
		// single fragment of an Access Unit. Fragments of the same Access Unit
		// have the same time stamp but different RTP sequence numbers. The
		// marker bit in the RTP header is 1 on the last fragment of an Access
		// Unit, and 0 on all other fragments." RFC 3640

		// ADTS header fields that we need to parse
		boolean protection;
		int frameLength, sum, length, nbau, nbpk, samplingRateIndex, profile;
		// NOTE(review): oldtime/now are declared but never read below.
		long oldtime = SystemClock.elapsedRealtime(), now = oldtime;
		byte[] header = new byte[8];

		try {
			while (!Thread.interrupted()) {

				// Synchronisation: ADTS packet starts with 12bits set to 1
				while (true) {
					if ( (is.read()&0xFF) == 0xFF ) {
						header[1] = (byte) is.read();
						if ( (header[1]&0xF0) == 0xF0) break;
					}
				}

				// Parse adts header (ADTS packets start with a 7 or 9 byte long header)
				fill(header, 2, 5);

				// The protection bit indicates whether or not the header contains the two extra bytes
				// (true means the protection_absent bit is set, i.e. NO CRC follows the 7-byte header)
				protection = (header[1]&0x01)>0 ? true : false;
				frameLength = (header[3]&0x03) << 11 |
						(header[4]&0xFF) << 3 |
						(header[5]&0xFF) >> 5 ;
				frameLength -= (protection ? 7 : 9);

				// Number of AAC frames in the ADTS frame
				// NOTE(review): nbau and nbpk are computed but not used below.
				nbau = (header[6]&0x03) + 1;

				// The number of RTP packets that will be sent for this ADTS frame
				nbpk = frameLength/MAXPACKETSIZE + 1;

				// Read CRS if any
				if (!protection) is.read(header,0,2);

				samplingRate = AACStream.AUDIO_SAMPLING_RATES[(header[2]&0x3C) >> 2];
				profile = ( (header[2]&0xC0) >> 6 ) + 1 ;

				// We update the RTP timestamp (1024 PCM samples per AAC access unit)
				ts += 1024L*1000000000L/samplingRate; //stats.average();

				//Log.d(TAG,"frameLength: "+frameLength+" protection: "+protection+" p: "+profile+" sr: "+samplingRate);

				// Fragment the access unit over as many RTP packets as needed;
				// only the last fragment has the marker bit set.
				sum = 0;
				while (sum<frameLength) {

					buffer = socket.requestBuffer();
					socket.updateTimestamp(ts);

					// Read frame
					if (frameLength-sum > MAXPACKETSIZE-rtphl-4) {
						length = MAXPACKETSIZE-rtphl-4;
					}
					else {
						length = frameLength-sum;
						socket.markNextPacket();
					}
					sum += length;
					fill(buffer, rtphl+4, length);

					// AU-headers-length field: contains the size in bits of a AU-header
					// 13+3 = 16 bits -> 13bits for AU-size and 3bits for AU-Index / AU-Index-delta
					// 13 bits will be enough because ADTS uses 13 bits for frame length
					buffer[rtphl] = 0;
					buffer[rtphl+1] = 0x10;

					// AU-size
					buffer[rtphl+2] = (byte) (frameLength>>5);
					buffer[rtphl+3] = (byte) (frameLength<<3);

					// AU-Index
					buffer[rtphl+3] &= 0xF8;
					buffer[rtphl+3] |= 0x00;

					send(rtphl+4+length);

				}

			}
		} catch (IOException e) {
			// Ignore
		} catch (ArrayIndexOutOfBoundsException e) {
			Log.e(TAG,"ArrayIndexOutOfBoundsException: "+(e.getMessage()!=null?e.getMessage():"unknown error"));
			e.printStackTrace();
		} catch (InterruptedException ignore) {}

		Log.d(TAG,"AAC ADTS packetizer stopped !");

	}

	/** Reads exactly {@code length} bytes into {@code buffer} at {@code offset}; throws IOException at end of stream. */
	private int fill(byte[] buffer, int offset,int length) throws IOException {
		int sum = 0, len;
		while (sum<length) {
			len = is.read(buffer, offset+sum, length-sum);
			if (len<0) {
				throw new IOException("End of stream");
			}
			else sum+=len;
		}
		return sum;
	}

}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtp;
20 +
21 +import java.io.IOException;
22 +import android.annotation.SuppressLint;
23 +import android.media.MediaCodec.BufferInfo;
24 +import android.os.SystemClock;
25 +import android.util.Log;
26 +
27 +/**
28 + * RFC 3640.
29 + *
30 + * Encapsulates AAC Access Units in RTP packets as specified in the RFC 3640.
31 + * This packetizer is used by the AACStream class in conjunction with the
32 + * MediaCodec API introduced in Android 4.1 (API Level 16).
33 + *
34 + */
35 +@SuppressLint("NewApi")
36 +public class AACLATMPacketizer extends AbstractPacketizer implements Runnable {
37 +
38 + private final static String TAG = "AACLATMPacketizer";
39 +
40 + private Thread t;
41 +
42 + public AACLATMPacketizer() {
43 + super();
44 + socket.setCacheSize(0);
45 + }
46 +
47 + public void start() {
48 + if (t==null) {
49 + t = new Thread(this);
50 + t.start();
51 + }
52 + }
53 +
54 + public void stop() {
55 + if (t != null) {
56 + try {
57 + is.close();
58 + } catch (IOException ignore) {}
59 + t.interrupt();
60 + try {
61 + t.join();
62 + } catch (InterruptedException e) {}
63 + t = null;
64 + }
65 + }
66 +
67 + public void setSamplingRate(int samplingRate) {
68 + socket.setClockFrequency(samplingRate);
69 + }
70 +
71 + @SuppressLint("NewApi")
72 + public void run() {
73 +
74 + Log.d(TAG,"AAC LATM packetizer started !");
75 +
76 + int length = 0;
77 + long oldts;
78 + BufferInfo bufferInfo;
79 +
80 + try {
81 + while (!Thread.interrupted()) {
82 + buffer = socket.requestBuffer();
83 + length = is.read(buffer, rtphl+4, MAXPACKETSIZE-(rtphl+4));
84 +
85 + if (length>0) {
86 +
87 + bufferInfo = ((MediaCodecInputStream)is).getLastBufferInfo();
88 + //Log.d(TAG,"length: "+length+" ts: "+bufferInfo.presentationTimeUs);
89 + oldts = ts;
90 + ts = bufferInfo.presentationTimeUs*1000;
91 +
92 + // Seems to happen sometimes
93 + if (oldts>ts) {
94 + socket.commitBuffer();
95 + continue;
96 + }
97 +
98 + socket.markNextPacket();
99 + socket.updateTimestamp(ts);
100 +
101 + // AU-headers-length field: contains the size in bits of a AU-header
102 + // 13+3 = 16 bits -> 13bits for AU-size and 3bits for AU-Index / AU-Index-delta
103 + // 13 bits will be enough because ADTS uses 13 bits for frame length
104 + buffer[rtphl] = 0;
105 + buffer[rtphl+1] = 0x10;
106 +
107 + // AU-size
108 + buffer[rtphl+2] = (byte) (length>>5);
109 + buffer[rtphl+3] = (byte) (length<<3);
110 +
111 + // AU-Index
112 + buffer[rtphl+3] &= 0xF8;
113 + buffer[rtphl+3] |= 0x00;
114 +
115 + send(rtphl+length+4);
116 +
117 + } else {
118 + socket.commitBuffer();
119 + }
120 +
121 + }
122 + } catch (IOException e) {
123 + } catch (ArrayIndexOutOfBoundsException e) {
124 + Log.e(TAG,"ArrayIndexOutOfBoundsException: "+(e.getMessage()!=null?e.getMessage():"unknown error"));
125 + e.printStackTrace();
126 + } catch (InterruptedException ignore) {}
127 +
128 + Log.d(TAG,"AAC LATM packetizer stopped !");
129 +
130 + }
131 +
132 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtp;
20 +
21 +import java.io.IOException;
22 +import android.util.Log;
23 +
24 +/**
25 + *
26 + * RFC 3267.
27 + *
28 + * AMR Streaming over RTP.
29 + *
30 + * Must be fed with an InputStream containing raw AMR NB
31 + * Stream must begin with a 6 bytes long header: "#!AMR\n", it will be skipped
32 + *
33 + */
public class AMRNBPacketizer extends AbstractPacketizer implements Runnable {

	public final static String TAG = "AMRNBPacketizer";

	private final int AMR_HEADER_LENGTH = 6; // "#!AMR\n"
	private static final int AMR_FRAME_HEADER_LENGTH = 1; // Each frame has a short header
	// Payload size in *bits* for each AMR-NB frame type (0..7).
	private static final int[] sFrameBits = {95, 103, 118, 134, 148, 159, 204, 244};
	private int samplingRate = 8000;

	// Worker thread running the packetization loop; null when stopped.
	private Thread t;

	public AMRNBPacketizer() {
		super();
		socket.setClockFrequency(samplingRate);
	}

	/** Starts the packetization thread (no-op if it is already running). */
	public void start() {
		if (t==null) {
			t = new Thread(this);
			t.start();
		}
	}

	/** Stops the thread: closing the input stream unblocks any pending read, then joins. */
	public void stop() {
		if (t != null) {
			try {
				is.close();
			} catch (IOException ignore) {}
			t.interrupt();
			try {
				t.join();
			} catch (InterruptedException e) {}
			t = null;
		}
	}

	/** Main loop: skips the "#!AMR\n" file header, then sends one RTP packet per AMR frame (RFC 3267). */
	public void run() {

		int frameLength, frameType;
		long now = System.nanoTime(), oldtime = now;
		byte[] header = new byte[AMR_HEADER_LENGTH];

		try {

			// Skip raw AMR header
			fill(header,0,AMR_HEADER_LENGTH);

			if (header[5] != '\n') {
				Log.e(TAG,"Bad header ! AMR not correcty supported by the phone !");
				return;
			}

			while (!Thread.interrupted()) {

				buffer = socket.requestBuffer();
				// CMR byte (Codec Mode Request): 0xF0 = no specific mode requested.
				buffer[rtphl] = (byte) 0xF0;

				// First we read the frame header
				fill(buffer, rtphl+1,AMR_FRAME_HEADER_LENGTH);

				// Then we calculate the frame payload length
				// NOTE(review): Math.abs on a (possibly negative) byte is an unusual
				// way to extract bits; ((buffer[rtphl+1] >> 3) & 0x0f) is the
				// conventional form — verify before changing.
				frameType = (Math.abs(buffer[rtphl + 1]) >> 3) & 0x0f;
				frameLength = (sFrameBits[frameType]+7)/8;

				// And we read the payload
				fill(buffer, rtphl+2,frameLength);

				//Log.d(TAG,"Frame length: "+frameLength+" frameType: "+frameType);

				// RFC 3267 Page 14: "For AMR, the sampling frequency is 8 kHz"
				// FIXME: Is this really always the case ??
				// 160 samples per 20 ms frame at 8 kHz.
				ts += 160L*1000000000L/samplingRate; //stats.average();
				socket.updateTimestamp(ts);
				socket.markNextPacket();

				//Log.d(TAG,"expected: "+ expected + " measured: "+measured);

				send(rtphl+1+AMR_FRAME_HEADER_LENGTH+frameLength);

			}

		} catch (IOException e) {
		} catch (InterruptedException e) {}

		Log.d(TAG,"AMR packetizer stopped !");

	}

	/** Reads exactly {@code length} bytes into {@code buffer} at {@code offset}; throws IOException at end of stream. */
	private int fill(byte[] buffer, int offset,int length) throws IOException {
		int sum = 0, len;
		while (sum<length) {
			len = is.read(buffer, offset+sum, length-sum);
			if (len<0) {
				throw new IOException("End of stream");
			}
			else sum+=len;
		}
		return sum;
	}


}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtp;
20 +
21 +import java.io.IOException;
22 +import java.io.InputStream;
23 +import java.io.OutputStream;
24 +import java.net.InetAddress;
25 +import java.util.Random;
26 +import net.majorkernelpanic.streaming.rtcp.SenderReport;
27 +
28 +/**
29 + *
30 + * Each packetizer inherits from this one and therefore uses RTP and UDP.
31 + *
32 + */
33 +abstract public class AbstractPacketizer {
34 +
35 + protected static final int rtphl = RtpSocket.RTP_HEADER_LENGTH;
36 +
37 + // Maximum size of RTP packets
38 + protected final static int MAXPACKETSIZE = RtpSocket.MTU-28;
39 +
40 + protected RtpSocket socket = null;
41 + protected InputStream is = null;
42 + protected byte[] buffer;
43 +
44 + protected long ts = 0;
45 +
46 + public AbstractPacketizer() {
47 + int ssrc = new Random().nextInt();
48 + ts = new Random().nextInt();
49 + socket = new RtpSocket();
50 + socket.setSSRC(ssrc);
51 + }
52 +
53 + public RtpSocket getRtpSocket() {
54 + return socket;
55 + }
56 +
57 + public void setSSRC(int ssrc) {
58 + socket.setSSRC(ssrc);
59 + }
60 +
61 + public int getSSRC() {
62 + return socket.getSSRC();
63 + }
64 +
65 + public void setInputStream(InputStream is) {
66 + this.is = is;
67 + }
68 +
69 + public void setTimeToLive(int ttl) throws IOException {
70 + socket.setTimeToLive(ttl);
71 + }
72 +
73 + /**
74 + * Sets the destination of the stream.
75 + * @param dest The destination address of the stream
76 + * @param rtpPort Destination port that will be used for RTP
77 + * @param rtcpPort Destination port that will be used for RTCP
78 + */
79 + public void setDestination(InetAddress dest, int rtpPort, int rtcpPort) {
80 + socket.setDestination(dest, rtpPort, rtcpPort);
81 + }
82 +
83 + /** Starts the packetizer. */
84 + public abstract void start();
85 +
86 + /** Stops the packetizer. */
87 + public abstract void stop();
88 +
89 + /** Updates data for RTCP SR and sends the packet. */
90 + protected void send(int length) throws IOException {
91 + socket.commitBuffer(length);
92 + }
93 +
94 + /** For debugging purposes. */
95 + protected static String printBuffer(byte[] buffer, int start,int end) {
96 + String str = "";
97 + for (int i=start;i<end;i++) str+=","+Integer.toHexString(buffer[i]&0xFF);
98 + return str;
99 + }
100 +
101 + /** Used in packetizers to estimate timestamps in RTP packets. */
102 + protected static class Statistics {
103 +
104 + public final static String TAG = "Statistics";
105 +
106 + private int count=700, c = 0;
107 + private float m = 0, q = 0;
108 + private long elapsed = 0;
109 + private long start = 0;
110 + private long duration = 0;
111 + private long period = 10000000000L;
112 + private boolean initoffset = false;
113 +
114 + public Statistics() {}
115 +
116 + public Statistics(int count, int period) {
117 + this.count = count;
118 + this.period = period;
119 + }
120 +
121 + public void reset() {
122 + initoffset = false;
123 + q = 0; m = 0; c = 0;
124 + elapsed = 0;
125 + start = 0;
126 + duration = 0;
127 + }
128 +
129 + public void push(long value) {
130 + elapsed += value;
131 + if (elapsed>period) {
132 + elapsed = 0;
133 + long now = System.nanoTime();
134 + if (!initoffset || (now - start < 0)) {
135 + start = now;
136 + duration = 0;
137 + initoffset = true;
138 + }
139 + // Prevents drifting issues by comparing the real duration of the
140 + // stream with the sum of all temporal lengths of RTP packets.
141 + value += (now - start) - duration;
142 + //Log.d(TAG, "sum1: "+duration/1000000+" sum2: "+(now-start)/1000000+" drift: "+((now-start)-duration)/1000000+" v: "+value/1000000);
143 + }
144 + if (c<5) {
145 + // We ignore the first 20 measured values because they may not be accurate
146 + c++;
147 + m = value;
148 + } else {
149 + m = (m*q+value)/(q+1);
150 + if (q<count) q++;
151 + }
152 + }
153 +
154 + public long average() {
155 + long l = (long)m;
156 + duration += l;
157 + return l;
158 + }
159 +
160 + }
161 +
162 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtp;
20 +
21 +import java.io.IOException;
22 +import android.util.Log;
23 +
24 +/**
25 + * RFC 4629.
26 + *
27 + * H.263 Streaming over RTP.
28 + *
29 + * Must be fed with an InputStream containing H.263 frames.
30 + * The stream must start with mpeg4 or 3gpp header, it will be skipped.
31 + *
32 + */
public class H263Packetizer extends AbstractPacketizer implements Runnable {

	public final static String TAG = "H263Packetizer";
	// Measures how long reading one frame takes, so the RTP timestamp can
	// advance at roughly the real frame rate.
	private Statistics stats = new Statistics();

	// Sender thread; non-null while the packetizer is running.
	private Thread t;

	public H263Packetizer() {
		super();
		// 90 kHz is the RTP clock rate used here for video.
		socket.setClockFrequency(90000);
	}

	/** Starts the sender thread (no-op if already running). */
	public void start() {
		if (t==null) {
			t = new Thread(this);
			t.start();
		}
	}

	/** Stops the sender thread: closes the input to unblock reads, then joins. */
	public void stop() {
		if (t != null) {
			try {
				is.close();
			} catch (IOException ignore) {}
			t.interrupt();
			try {
				t.join();
			} catch (InterruptedException e) {}
			t = null;
		}
	}

	/**
	 * Main loop: reads the H.263 bit stream, splits it on picture start
	 * codes, and sends each picture as one or more RTP packets carrying
	 * the two byte payload header of RFC 4629.
	 */
	public void run() {
		long time, duration = 0;
		int i = 0, j = 0, tr;
		boolean firstFragment = true;
		byte[] nextBuffer;
		stats.reset();

		try {
			while (!Thread.interrupted()) {

				// j>0 means the previous iteration already copied the start of the
				// next frame into a fresh buffer, so we must not request another one.
				if (j==0) buffer = socket.requestBuffer();
				socket.updateTimestamp(ts);

				// Each packet we send has a two byte long header (See section 5.1 of RFC 4629)
				buffer[rtphl] = 0;
				buffer[rtphl+1] = 0;

				time = System.nanoTime();
				// fill() throws IOException at end of stream, so the <0 test below
				// is effectively a dead safety check.
				if (fill(rtphl+j+2,MAXPACKETSIZE-rtphl-j-2)<0) return;
				duration += System.nanoTime() - time;
				j = 0;
				// Each h263 frame starts with: 0000 0000 0000 0000 1000 00??
				// Here we search where the next frame begins in the bit stream
				for (i=rtphl+2;i<MAXPACKETSIZE-1;i++) {
					if (buffer[i]==0 && buffer[i+1]==0 && (buffer[i+2]&0xFC)==0x80) {
						j=i;
						break;
					}
				}
				// Parse temporal reference (only used for the debug log below)
				tr = (buffer[i+2]&0x03)<<6 | (buffer[i+3]&0xFF)>>2;
				//Log.d(TAG,"j: "+j+" buffer: "+printBuffer(rtphl, rtphl+5)+" tr: "+tr);
				if (firstFragment) {
					// This is the first fragment of the frame -> header is set to 0x0400
					// (P bit set: the picture start code is implicit, RFC 4629 §5.1)
					buffer[rtphl] = 4;
					firstFragment = false;
				} else {
					buffer[rtphl] = 0;
				}
				if (j>0) {
					// We have found the end of the frame
					stats.push(duration);
					ts+= stats.average(); duration = 0;
					//Log.d(TAG,"End of frame ! duration: "+stats.average());
					// The last fragment of a frame has to be marked
					socket.markNextPacket();
					send(j);
					// Carry the tail (start of the next frame) into a new buffer.
					nextBuffer = socket.requestBuffer();
					System.arraycopy(buffer,j+2,nextBuffer,rtphl+2,MAXPACKETSIZE-j-2);
					buffer = nextBuffer;
					j = MAXPACKETSIZE-j-2;
					firstFragment = true;
				} else {
					// We have not found the beginning of another frame
					// The whole packet is a fragment of a frame
					send(MAXPACKETSIZE);
				}
			}
		} catch (IOException e) {
		} catch (InterruptedException e) {}

		Log.d(TAG,"H263 Packetizer stopped !");

	}

	/**
	 * Blocks until exactly {@code length} bytes have been read into the
	 * shared packet buffer at {@code offset}.
	 * @throws IOException if the end of the stream is reached first
	 */
	private int fill(int offset,int length) throws IOException {

		int sum = 0, len;

		while (sum<length) {
			len = is.read(buffer, offset+sum, length-sum);
			if (len<0) {
				throw new IOException("End of stream");
			}
			else sum+=len;
		}

		return sum;

	}

}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtp;
20 +
21 +import java.io.IOException;
22 +import android.annotation.SuppressLint;
23 +import android.util.Log;
24 +
25 +/**
26 + *
27 + * RFC 3984.
28 + *
29 + * H.264 streaming over RTP.
30 + *
31 + * Must be fed with an InputStream containing H.264 NAL units preceded by their length (4 bytes).
32 + * The stream must start with mpeg4 or 3gpp header, it will be skipped.
33 + *
34 + */
35 +public class H264Packetizer extends AbstractPacketizer implements Runnable {
36 +
37 + public final static String TAG = "H264Packetizer";
38 +
39 + private Thread t = null;
40 + private int naluLength = 0;
41 + private long delay = 0, oldtime = 0;
42 + private Statistics stats = new Statistics();
43 + private byte[] sps = null, pps = null, stapa = null;
44 + byte[] header = new byte[5];
45 + private int count = 0;
46 + private int streamType = 1;
47 +
48 +
49 + public H264Packetizer() {
50 + super();
51 + socket.setClockFrequency(90000);
52 + }
53 +
54 + public void start() {
55 + if (t == null) {
56 + t = new Thread(this);
57 + t.start();
58 + }
59 + }
60 +
61 + public void stop() {
62 + if (t != null) {
63 + try {
64 + is.close();
65 + } catch (IOException e) {}
66 + t.interrupt();
67 + try {
68 + t.join();
69 + } catch (InterruptedException e) {}
70 + t = null;
71 + }
72 + }
73 +
74 + public void setStreamParameters(byte[] pps, byte[] sps) {
75 + this.pps = pps;
76 + this.sps = sps;
77 +
78 + // A STAP-A NAL (NAL type 24) containing the sps and pps of the stream
79 + if (pps != null && sps != null) {
80 + // STAP-A NAL header + NALU 1 (SPS) size + NALU 2 (PPS) size = 5 bytes
81 + stapa = new byte[sps.length + pps.length + 5];
82 +
83 + // STAP-A NAL header is 24
84 + stapa[0] = 24;
85 +
86 + // Write NALU 1 size into the array (NALU 1 is the SPS).
87 + stapa[1] = (byte) (sps.length >> 8);
88 + stapa[2] = (byte) (sps.length & 0xFF);
89 +
90 + // Write NALU 2 size into the array (NALU 2 is the PPS).
91 + stapa[sps.length + 3] = (byte) (pps.length >> 8);
92 + stapa[sps.length + 4] = (byte) (pps.length & 0xFF);
93 +
94 + // Write NALU 1 into the array, then write NALU 2 into the array.
95 + System.arraycopy(sps, 0, stapa, 3, sps.length);
96 + System.arraycopy(pps, 0, stapa, 5 + sps.length, pps.length);
97 + }
98 + }
99 +
100 + public void run() {
101 + long duration = 0;
102 + Log.d(TAG,"H264 packetizer started !");
103 + stats.reset();
104 + count = 0;
105 +
106 + if (is instanceof MediaCodecInputStream) {
107 + streamType = 1;
108 + socket.setCacheSize(0);
109 + } else {
110 + streamType = 0;
111 + socket.setCacheSize(400);
112 + }
113 +
114 + try {
115 + while (!Thread.interrupted()) {
116 +
117 + oldtime = System.nanoTime();
118 + // We read a NAL units from the input stream and we send them
119 + send();
120 + // We measure how long it took to receive NAL units from the phone
121 + duration = System.nanoTime() - oldtime;
122 +
123 + stats.push(duration);
124 + // Computes the average duration of a NAL unit
125 + delay = stats.average();
126 + //Log.d(TAG,"duration: "+duration/1000000+" delay: "+delay/1000000);
127 +
128 + }
129 + } catch (IOException e) {
130 + } catch (InterruptedException e) {}
131 +
132 + Log.d(TAG,"H264 packetizer stopped !");
133 +
134 + }
135 +
136 + /**
137 + * Reads a NAL unit in the FIFO and sends it.
138 + * If it is too big, we split it in FU-A units (RFC 3984).
139 + */
140 + @SuppressLint("NewApi")
141 + private void send() throws IOException, InterruptedException {
142 + int sum = 1, len = 0, type;
143 +
144 + if (streamType == 0) {
145 + // NAL units are preceeded by their length, we parse the length
146 + fill(header,0,5);
147 + ts += delay;
148 + naluLength = header[3]&0xFF | (header[2]&0xFF)<<8 | (header[1]&0xFF)<<16 | (header[0]&0xFF)<<24;
149 + if (naluLength>100000 || naluLength<0) resync();
150 + } else if (streamType == 1) {
151 + // NAL units are preceeded with 0x00000001
152 + fill(header,0,5);
153 + ts = ((MediaCodecInputStream)is).getLastBufferInfo().presentationTimeUs*1000L;
154 + //ts += delay;
155 + naluLength = is.available()+1;
156 + if (!(header[0]==0 && header[1]==0 && header[2]==0)) {
157 + // Turns out, the NAL units are not preceeded with 0x00000001
158 + Log.e(TAG, "NAL units are not preceeded by 0x00000001");
159 + streamType = 2;
160 + return;
161 + }
162 + } else {
163 + // Nothing preceededs the NAL units
164 + fill(header,0,1);
165 + header[4] = header[0];
166 + ts = ((MediaCodecInputStream)is).getLastBufferInfo().presentationTimeUs*1000L;
167 + //ts += delay;
168 + naluLength = is.available()+1;
169 + }
170 +
171 + // Parses the NAL unit type
172 + type = header[4]&0x1F;
173 +
174 +
175 + // The stream already contains NAL unit type 7 or 8, we don't need
176 + // to add them to the stream ourselves
177 + if (type == 7 || type == 8) {
178 + Log.v(TAG,"SPS or PPS present in the stream.");
179 + count++;
180 + if (count>4) {
181 + sps = null;
182 + pps = null;
183 + }
184 + }
185 +
186 + // We send two packets containing NALU type 7 (SPS) and 8 (PPS)
187 + // Those should allow the H264 stream to be decoded even if no SDP was sent to the decoder.
188 + if (type == 5 && sps != null && pps != null) {
189 + buffer = socket.requestBuffer();
190 + socket.markNextPacket();
191 + socket.updateTimestamp(ts);
192 + System.arraycopy(stapa, 0, buffer, rtphl, stapa.length);
193 + super.send(rtphl+stapa.length);
194 + }
195 +
196 + //Log.d(TAG,"- Nal unit length: " + naluLength + " delay: "+delay/1000000+" type: "+type);
197 +
198 + // Small NAL unit => Single NAL unit
199 + if (naluLength<=MAXPACKETSIZE-rtphl-2) {
200 + buffer = socket.requestBuffer();
201 + buffer[rtphl] = header[4];
202 + len = fill(buffer, rtphl+1, naluLength-1);
203 + socket.updateTimestamp(ts);
204 + socket.markNextPacket();
205 + super.send(naluLength+rtphl);
206 + //Log.d(TAG,"----- Single NAL unit - len:"+len+" delay: "+delay);
207 + }
208 + // Large NAL unit => Split nal unit
209 + else {
210 +
211 + // Set FU-A header
212 + header[1] = (byte) (header[4] & 0x1F); // FU header type
213 + header[1] += 0x80; // Start bit
214 + // Set FU-A indicator
215 + header[0] = (byte) ((header[4] & 0x60) & 0xFF); // FU indicator NRI
216 + header[0] += 28;
217 +
218 + while (sum < naluLength) {
219 + buffer = socket.requestBuffer();
220 + buffer[rtphl] = header[0];
221 + buffer[rtphl+1] = header[1];
222 + socket.updateTimestamp(ts);
223 + if ((len = fill(buffer, rtphl+2, naluLength-sum > MAXPACKETSIZE-rtphl-2 ? MAXPACKETSIZE-rtphl-2 : naluLength-sum ))<0) return; sum += len;
224 + // Last packet before next NAL
225 + if (sum >= naluLength) {
226 + // End bit on
227 + buffer[rtphl+1] += 0x40;
228 + socket.markNextPacket();
229 + }
230 + super.send(len+rtphl+2);
231 + // Switch start bit
232 + header[1] = (byte) (header[1] & 0x7F);
233 + //Log.d(TAG,"----- FU-A unit, sum:"+sum);
234 + }
235 + }
236 + }
237 +
238 + private int fill(byte[] buffer, int offset,int length) throws IOException {
239 + int sum = 0, len;
240 + while (sum<length) {
241 + len = is.read(buffer, offset+sum, length-sum);
242 + if (len<0) {
243 + throw new IOException("End of stream");
244 + }
245 + else sum+=len;
246 + }
247 + return sum;
248 + }
249 +
250 + private void resync() throws IOException {
251 + int type;
252 +
253 + Log.e(TAG,"Packetizer out of sync ! Let's try to fix that...(NAL length: "+naluLength+")");
254 +
255 + while (true) {
256 +
257 + header[0] = header[1];
258 + header[1] = header[2];
259 + header[2] = header[3];
260 + header[3] = header[4];
261 + header[4] = (byte) is.read();
262 +
263 + type = header[4]&0x1F;
264 +
265 + if (type == 5 || type == 1) {
266 + naluLength = header[3]&0xFF | (header[2]&0xFF)<<8 | (header[1]&0xFF)<<16 | (header[0]&0xFF)<<24;
267 + if (naluLength>0 && naluLength<100000) {
268 + oldtime = System.nanoTime();
269 + Log.e(TAG,"A NAL unit may have been found in the bit stream !");
270 + break;
271 + }
272 + if (naluLength==0) {
273 + Log.e(TAG,"NAL unit with NULL size found...");
274 + } else if (header[3]==0xFF && header[2]==0xFF && header[1]==0xFF && header[0]==0xFF) {
275 + Log.e(TAG,"NAL unit with 0xFFFFFFFF size found...");
276 + }
277 + }
278 +
279 + }
280 +
281 + }
282 +
283 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtp;
20 +
21 +import java.io.IOException;
22 +import java.io.InputStream;
23 +import java.nio.ByteBuffer;
24 +import android.annotation.SuppressLint;
25 +import android.media.MediaCodec;
26 +import android.media.MediaCodec.BufferInfo;
27 +import android.media.MediaFormat;
28 +import android.util.Log;
29 +
30 +/**
31 + * An InputStream that uses data from a MediaCodec.
32 + * The purpose of this class is to interface existing RTP packetizers of
33 + * libstreaming with the new MediaCodec API. This class is not thread safe !
34 + */
35 +@SuppressLint("NewApi")
36 +public class MediaCodecInputStream extends InputStream {
37 +
38 + public final String TAG = "MediaCodecInputStream";
39 +
40 + private MediaCodec mMediaCodec = null;
41 + private BufferInfo mBufferInfo = new BufferInfo();
42 + private ByteBuffer[] mBuffers = null;
43 + private ByteBuffer mBuffer = null;
44 + private int mIndex = -1;
45 + private boolean mClosed = false;
46 +
47 + public MediaFormat mMediaFormat;
48 +
49 + public MediaCodecInputStream(MediaCodec mediaCodec) {
50 + mMediaCodec = mediaCodec;
51 + mBuffers = mMediaCodec.getOutputBuffers();
52 + }
53 +
54 + @Override
55 + public void close() {
56 + mClosed = true;
57 + }
58 +
59 + @Override
60 + public int read() throws IOException {
61 + return 0;
62 + }
63 +
64 + @Override
65 + public int read(byte[] buffer, int offset, int length) throws IOException {
66 + int min = 0;
67 +
68 + try {
69 + if (mBuffer==null) {
70 + while (!Thread.interrupted() && !mClosed) {
71 + mIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 500000);
72 + if (mIndex>=0 ){
73 + //Log.d(TAG,"Index: "+mIndex+" Time: "+mBufferInfo.presentationTimeUs+" size: "+mBufferInfo.size);
74 + mBuffer = mBuffers[mIndex];
75 + mBuffer.position(0);
76 + break;
77 + } else if (mIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
78 + mBuffers = mMediaCodec.getOutputBuffers();
79 + } else if (mIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
80 + mMediaFormat = mMediaCodec.getOutputFormat();
81 + Log.i(TAG,mMediaFormat.toString());
82 + } else if (mIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
83 + Log.v(TAG,"No buffer available...");
84 + //return 0;
85 + } else {
86 + Log.e(TAG,"Message: "+mIndex);
87 + //return 0;
88 + }
89 + }
90 + }
91 +
92 + if (mClosed) throw new IOException("This InputStream was closed");
93 +
94 + min = length < mBufferInfo.size - mBuffer.position() ? length : mBufferInfo.size - mBuffer.position();
95 + mBuffer.get(buffer, offset, min);
96 + if (mBuffer.position()>=mBufferInfo.size) {
97 + mMediaCodec.releaseOutputBuffer(mIndex, false);
98 + mBuffer = null;
99 + }
100 +
101 + } catch (RuntimeException e) {
102 + e.printStackTrace();
103 + }
104 +
105 + return min;
106 + }
107 +
108 + public int available() {
109 + if (mBuffer != null)
110 + return mBufferInfo.size - mBuffer.position();
111 + else
112 + return 0;
113 + }
114 +
115 + public BufferInfo getLastBufferInfo() {
116 + return mBufferInfo;
117 + }
118 +
119 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtp;
20 +
21 +import java.io.IOException;
22 +import java.io.OutputStream;
23 +import java.net.DatagramPacket;
24 +import java.net.InetAddress;
25 +import java.net.MulticastSocket;
26 +import java.util.concurrent.Semaphore;
27 +import java.util.concurrent.TimeUnit;
28 +import net.majorkernelpanic.streaming.rtcp.SenderReport;
29 +import android.os.SystemClock;
30 +import android.util.Log;
31 +
32 +/**
33 + * A basic implementation of an RTP socket.
34 + * It implements a buffering mechanism, relying on a FIFO of buffers and a Thread.
35 + * That way, if a packetizer tries to send many packets too quickly, the FIFO will
36 + * grow and packets will be sent one by one smoothly.
37 + */
38 +public class RtpSocket implements Runnable {
39 +
	public static final String TAG = "RtpSocket";

	/** Use this to use UDP for the transport protocol. */
	public final static int TRANSPORT_UDP = 0x00;

	/** Use this to use TCP for the transport protocol. */
	public final static int TRANSPORT_TCP = 0x01;

	public static final int RTP_HEADER_LENGTH = 12;
	public static final int MTU = 1300;

	// UDP transport: one DatagramPacket per FIFO slot, all sent through mSocket.
	private MulticastSocket mSocket;
	private DatagramPacket[] mPackets;
	// FIFO of RTP packet buffers (MTU bytes each, RTP header pre-filled).
	private byte[][] mBuffers;
	// Timestamp (in ns) associated with each committed FIFO slot.
	private long[] mTimestamps;

	// RTCP sender reports for this stream.
	private SenderReport mReport;

	// mBufferRequested counts free FIFO slots; mBufferCommitted counts slots
	// waiting to be sent by the sender thread.
	private Semaphore mBufferRequested, mBufferCommitted;
	// Sender thread; created lazily on the first commit.
	private Thread mThread;

	// TRANSPORT_UDP or TRANSPORT_TCP.
	private int mTransport;
	// How many ms of stream to buffer before sending (0 = send immediately).
	private long mCacheSize;
	// RTP clock frequency in Hz (the packetizers set 90000 for video).
	private long mClock = 0;
	private long mOldTimestamp = 0;
	private int mSsrc, mSeq = 0, mPort = -1;
	// FIFO state: mBufferIn = producer slot, mBufferOut = consumer slot.
	private int mBufferCount, mBufferIn, mBufferOut;
	// NOTE(review): run() only sends packets once mCount exceeds 30, so the
	// first packets are skipped — confirm this is intentional.
	private int mCount = 0;
	// 4-byte RTSP interleaved frame header {'$', channel, length hi, length lo}.
	private byte mTcpHeader[];
	protected OutputStream mOutputStream = null;

	private AverageBitrate mAverageBitrate;
72 +
73 + /**
74 + * This RTP socket implements a buffering mechanism relying on a FIFO of buffers and a Thread.
75 + * @throws IOException
76 + */
77 + public RtpSocket() {
78 +
79 + mCacheSize = 0;
80 + mBufferCount = 300; // TODO: readjust that when the FIFO is full
81 + mBuffers = new byte[mBufferCount][];
82 + mPackets = new DatagramPacket[mBufferCount];
83 + mReport = new SenderReport();
84 + mAverageBitrate = new AverageBitrate();
85 + mTransport = TRANSPORT_UDP;
86 + mTcpHeader = new byte[] {'$',0,0,0};
87 +
88 + resetFifo();
89 +
90 + for (int i=0; i<mBufferCount; i++) {
91 +
92 + mBuffers[i] = new byte[MTU];
93 + mPackets[i] = new DatagramPacket(mBuffers[i], 1);
94 +
95 + /* Version(2) Padding(0) */
96 + /* ^ ^ Extension(0) */
97 + /* | | ^ */
98 + /* | -------- | */
99 + /* | |--------------------- */
100 + /* | || -----------------------> Source Identifier(0) */
101 + /* | || | */
102 + mBuffers[i][0] = (byte) Integer.parseInt("10000000",2);
103 +
104 + /* Payload Type */
105 + mBuffers[i][1] = (byte) 96;
106 +
107 + /* Byte 2,3 -> Sequence Number */
108 + /* Byte 4,5,6,7 -> Timestamp */
109 + /* Byte 8,9,10,11 -> Sync Source Identifier */
110 +
111 + }
112 +
113 + try {
114 + mSocket = new MulticastSocket();
115 + } catch (Exception e) {
116 + throw new RuntimeException(e.getMessage());
117 + }
118 +
119 + }
120 +
	/** Resets the FIFO indices, the semaphores, the RTCP report and the bitrate estimator. */
	private void resetFifo() {
		mCount = 0;
		mBufferIn = 0;
		mBufferOut = 0;
		mTimestamps = new long[mBufferCount];
		// All slots free, none committed.
		mBufferRequested = new Semaphore(mBufferCount);
		mBufferCommitted = new Semaphore(0);
		mReport.reset();
		mAverageBitrate.reset();
	}
131 +
	/** Closes the underlying socket. Does not stop the sender thread. */
	public void close() {
		mSocket.close();
	}
136 +
	/** Sets the SSRC of the stream and writes it into every pre-built RTP header (bytes 8-11). */
	public void setSSRC(int ssrc) {
		this.mSsrc = ssrc;
		for (int i=0;i<mBufferCount;i++) {
			setLong(mBuffers[i], ssrc,8,12);
		}
		mReport.setSSRC(mSsrc);
	}
145 +
	/** Returns the SSRC of the stream. */
	public int getSSRC() {
		return mSsrc;
	}
150 +
	/** Sets the clock frequency of the stream in Hz (used to scale timestamps in RTP headers). */
	public void setClockFrequency(long clock) {
		mClock = clock;
	}
155 +
	/** Sets the size of the FIFO in ms (0 disables pacing; see run()). */
	public void setCacheSize(long cacheSize) {
		mCacheSize = cacheSize;
	}
160 +
	/** Sets the Time To Live of the UDP packets. */
	public void setTimeToLive(int ttl) throws IOException {
		mSocket.setTimeToLive(ttl);
	}
165 +
	/** Sets the destination address and the ports to which the packets will be sent. */
	public void setDestination(InetAddress dest, int dport, int rtcpPort) {
		// A port of 0 means "unspecified": the destination is left unchanged.
		if (dport != 0 && rtcpPort != 0) {
			mTransport = TRANSPORT_UDP;
			mPort = dport;
			for (int i=0;i<mBufferCount;i++) {
				mPackets[i].setPort(dport);
				mPackets[i].setAddress(dest);
			}
			mReport.setDestination(dest, rtcpPort);
		}
	}
178 +
	/**
	 * If a TCP is used as the transport protocol for the RTP session,
	 * the output stream to which RTP packets will be written to must
	 * be specified with this method.
	 * @param outputStream stream of the RTSP connection (ignored if null)
	 * @param channelIdentifier interleaved channel for RTP; RTCP uses channelIdentifier+1
	 */
	public void setOutputStream(OutputStream outputStream, byte channelIdentifier) {
		if (outputStream != null) {
			mTransport = TRANSPORT_TCP;
			mOutputStream = outputStream;
			mTcpHeader[1] = channelIdentifier;
			mReport.setOutputStream(outputStream, (byte) (channelIdentifier+1));
		}
	}
192 +
	/** Returns the destination RTP port, or -1 if none was set. */
	public int getPort() {
		return mPort;
	}
196 +
	/** Returns the local ports used: {RTP port, RTCP port}. */
	public int[] getLocalPorts() {
		return new int[] {
			mSocket.getLocalPort(),
			mReport.getLocalPort()
		};

	}
204 +
	/**
	 * Returns an available buffer from the FIFO, it can then be modified.
	 * Call {@link #commitBuffer(int)} to send it over the network.
	 * Blocks while the FIFO is full.
	 * @throws InterruptedException
	 **/
	public byte[] requestBuffer() throws InterruptedException {
		mBufferRequested.acquire();
		// Clear the marker bit left over from the packet that last used this slot.
		mBuffers[mBufferIn][1] &= 0x7F;
		return mBuffers[mBufferIn];
	}
215 +
	/**
	 * Commits the current buffer without updating the sequence number or
	 * the packet length.
	 * NOTE(review): despite the original comment ("without sending the
	 * packet"), the sender thread still processes this slot and may send
	 * it with whatever length was set last — confirm the intended use.
	 */
	public void commitBuffer() throws IOException {

		// Lazily start the sender thread.
		if (mThread == null) {
			mThread = new Thread(this);
			mThread.start();
		}

		// Advance the producer index (circular) and wake the sender.
		if (++mBufferIn>=mBufferCount) mBufferIn = 0;
		mBufferCommitted.release();

	}
228 +
	/** Sends the RTP packet over the network: stamps the sequence number, sets the payload length and hands the slot to the sender thread. */
	public void commitBuffer(int length) throws IOException {
		updateSequence();
		mPackets[mBufferIn].setLength(length);

		mAverageBitrate.push(length);

		// Advance the producer index (circular) and wake the sender.
		if (++mBufferIn>=mBufferCount) mBufferIn = 0;
		mBufferCommitted.release();

		// Lazily start the sender thread.
		if (mThread == null) {
			mThread = new Thread(this);
			mThread.start();
		}

	}
245 +
	/** Returns an approximation of the bitrate of the RTP stream in bits per second. */
	public long getBitrate() {
		return mAverageBitrate.average();
	}
250 +
	/** Increments the sequence number and writes it into header bytes 2-3 of the current slot. */
	private void updateSequence() {
		setLong(mBuffers[mBufferIn], ++mSeq, 2, 4);
	}
255 +
	/**
	 * Overwrites the timestamp in the packet.
	 * @param timestamp The new timestamp in ns.
	 **/
	public void updateTimestamp(long timestamp) {
		mTimestamps[mBufferIn] = timestamp;
		// Convert ns to RTP clock units (timestamp * mClock / 1e9), factored
		// as (ts/100)*(clock/1000)/10000 to avoid 64-bit overflow.
		setLong(mBuffers[mBufferIn], (timestamp/100L)*(mClock/1000L)/10000L, 4, 8);
	}
264 +
	/** Sets the marker bit in the RTP header of the current slot (last packet of a frame). */
	public void markNextPacket() {
		mBuffers[mBufferIn][1] |= 0x80;
	}
269 +
	/** The Thread sends the packets in the FIFO one by one at a constant rate. */
	@Override
	public void run() {
		Statistics stats = new Statistics(50,3000);
		try {
			// Caches mCacheSize milliseconds of the stream in the FIFO.
			Thread.sleep(mCacheSize);
			long delta = 0;
			// Exits (and resets the FIFO) when nothing is committed for 4 s.
			while (mBufferCommitted.tryAcquire(4,TimeUnit.SECONDS)) {
				if (mOldTimestamp != 0) {
					// We use our knowledge of the clock rate of the stream and the difference between two timestamps to
					// compute the time lapse that the packet represents.
					if ((mTimestamps[mBufferOut]-mOldTimestamp)>0) {
						stats.push(mTimestamps[mBufferOut]-mOldTimestamp);
						long d = stats.average()/1000000;
						//Log.d(TAG,"delay: "+d+" d: "+(mTimestamps[mBufferOut]-mOldTimestamp)/1000000);
						// We ensure that packets are sent at a constant and suitable rate no matter how the RtpSocket is used.
						if (mCacheSize>0) Thread.sleep(d);
					} else if ((mTimestamps[mBufferOut]-mOldTimestamp)<0) {
						// Timestamps going backwards: log and keep going.
						Log.e(TAG, "TS: "+mTimestamps[mBufferOut]+" OLD: "+mOldTimestamp);
					}
					delta += mTimestamps[mBufferOut]-mOldTimestamp;
					if (delta>500000000 || delta<0) {
						//Log.d(TAG,"permits: "+mBufferCommitted.availablePermits());
						delta = 0;
					}
				}
				mReport.update(mPackets[mBufferOut].getLength(), (mTimestamps[mBufferOut]/100L)*(mClock/1000L)/10000L);
				mOldTimestamp = mTimestamps[mBufferOut];
				// NOTE(review): the first 31 committed packets are never sent
				// (only counted) — confirm this warm-up skip is intentional.
				if (mCount++>30) {
					if (mTransport == TRANSPORT_UDP) {
						mSocket.send(mPackets[mBufferOut]);
					} else {
						sendTCP();
					}
				}
				// Advance the consumer index (circular) and free the slot.
				if (++mBufferOut>=mBufferCount) mBufferOut = 0;
				mBufferRequested.release();
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		mThread = null;
		resetFifo();
	}
315 +
	/**
	 * Writes the current packet to the RTSP TCP connection, framed with the
	 * 4-byte interleaved header ('$', channel, 16-bit length).
	 * Failures are deliberately ignored (best effort: the RTSP layer
	 * detects a dead connection on its own).
	 */
	private void sendTCP() {
		synchronized (mOutputStream) {
			int len = mPackets[mBufferOut].getLength();
			Log.d(TAG,"sent "+len);
			mTcpHeader[2] = (byte) (len>>8);
			mTcpHeader[3] = (byte) (len&0xFF);
			try {
				mOutputStream.write(mTcpHeader);
				mOutputStream.write(mBuffers[mBufferOut], 0, len);
			} catch (Exception e) {}
		}
	}
328 +
329 + private void setLong(byte[] buffer, long n, int begin, int end) {
330 + for (end--; end >= begin; end--) {
331 + buffer[end] = (byte) (n % 256);
332 + n >>= 8;
333 + }
334 + }
335 +
336 + /**
337 + * Computes an average bit rate.
338 + **/
339 + protected static class AverageBitrate {
340 +
341 + private final static long RESOLUTION = 200;
342 +
343 + private long mOldNow, mNow, mDelta;
344 + private long[] mElapsed, mSum;
345 + private int mCount, mIndex, mTotal;
346 + private int mSize;
347 +
348 + public AverageBitrate() {
349 + mSize = 5000/((int)RESOLUTION);
350 + reset();
351 + }
352 +
353 + public AverageBitrate(int delay) {
354 + mSize = delay/((int)RESOLUTION);
355 + reset();
356 + }
357 +
358 + public void reset() {
359 + mSum = new long[mSize];
360 + mElapsed = new long[mSize];
361 + mNow = SystemClock.elapsedRealtime();
362 + mOldNow = mNow;
363 + mCount = 0;
364 + mDelta = 0;
365 + mTotal = 0;
366 + mIndex = 0;
367 + }
368 +
369 + public void push(int length) {
370 + mNow = SystemClock.elapsedRealtime();
371 + if (mCount>0) {
372 + mDelta += mNow - mOldNow;
373 + mTotal += length;
374 + if (mDelta>RESOLUTION) {
375 + mSum[mIndex] = mTotal;
376 + mTotal = 0;
377 + mElapsed[mIndex] = mDelta;
378 + mDelta = 0;
379 + mIndex++;
380 + if (mIndex>=mSize) mIndex = 0;
381 + }
382 + }
383 + mOldNow = mNow;
384 + mCount++;
385 + }
386 +
387 + public int average() {
388 + long delta = 0, sum = 0;
389 + for (int i=0;i<mSize;i++) {
390 + sum += mSum[i];
391 + delta += mElapsed[i];
392 + }
393 + //Log.d(TAG, "Time elapsed: "+delta);
394 + return (int) (delta>0?8000*sum/delta:0);
395 + }
396 +
397 + }
398 +
	/** Computes the proper rate at which packets are sent. */
	protected static class Statistics {

		public final static String TAG = "Statistics";

		// count: window size of the moving average; c: number of samples seen so far.
		private int count=500, c = 0;
		// m: moving average of the pushed values (ns); q: current weight of the average.
		private float m = 0, q = 0;
		private long elapsed = 0;
		private long start = 0;
		private long duration = 0;
		// Interval (ns) at which the average is corrected against the wall clock.
		private long period = 6000000000L;
		private boolean initoffset = false;

		/**
		 * @param count window size of the moving average
		 * @param period drift-correction period in milliseconds (stored as ns)
		 */
		public Statistics(int count, long period) {
			this.count = count;
			this.period = period*1000000L;
		}

		/** Pushes a new inter-packet delay (ns), periodically compensating for clock drift. */
		public void push(long value) {
			duration += value;
			elapsed += value;
			if (elapsed>period) {
				elapsed = 0;
				long now = System.nanoTime();
				if (!initoffset || (now - start < 0)) {
					start = now;
					duration = 0;
					initoffset = true;
				}
				// Subtract the gap between measured wall time (now-start) and the sum
				// of pushed values (duration) so drift does not accumulate.
				value -= (now - start) - duration;
				//Log.d(TAG, "sum1: "+duration/1000000+" sum2: "+(now-start)/1000000+" drift: "+((now-start)-duration)/1000000+" v: "+value/1000000);
			}
			if (c<40) {
				// We ignore the first 40 measured values because they may not be accurate
				c++;
				m = value;
			} else {
				m = (m*q+value)/(q+1);
				if (q<count) q++;
			}
		}

		/** Returns the averaged delay (ns) minus a 2 ms safety margin, never negative. */
		public long average() {
			long l = (long)m-2000000;
			return l>0 ? l : 0;
		}

	}
447 +
448 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtsp;
20 +
21 +import java.io.IOException;
22 +import java.io.InputStream;
23 +import java.io.PipedInputStream;
24 +import java.io.PipedOutputStream;
25 +
26 +class RtcpDeinterleaver extends InputStream implements Runnable {
27 +
28 + public final static String TAG = "RtcpDeinterleaver";
29 +
30 + private IOException mIOException;
31 + private InputStream mInputStream;
32 + private PipedInputStream mPipedInputStream;
33 + private PipedOutputStream mPipedOutputStream;
34 + private byte[] mBuffer;
35 +
36 + public RtcpDeinterleaver(InputStream inputStream) {
37 + mInputStream = inputStream;
38 + mPipedInputStream = new PipedInputStream(4096);
39 + try {
40 + mPipedOutputStream = new PipedOutputStream(mPipedInputStream);
41 + } catch (IOException e) {}
42 + mBuffer = new byte[1024];
43 + new Thread(this).start();
44 + }
45 +
46 + @Override
47 + public void run() {
48 + try {
49 + while (true) {
50 + int len = mInputStream.read(mBuffer, 0, 1024);
51 + mPipedOutputStream.write(mBuffer, 0, len);
52 + }
53 + } catch (IOException e) {
54 + try {
55 + mPipedInputStream.close();
56 + } catch (IOException ignore) {}
57 + mIOException = e;
58 + }
59 + }
60 +
61 + @Override
62 + public int read(byte[] buffer) throws IOException {
63 + if (mIOException != null) {
64 + throw mIOException;
65 + }
66 + return mPipedInputStream.read(buffer);
67 + }
68 +
69 + @Override
70 + public int read(byte[] buffer, int offset, int length) throws IOException {
71 + if (mIOException != null) {
72 + throw mIOException;
73 + }
74 + return mPipedInputStream.read(buffer, offset, length);
75 + }
76 +
77 + @Override
78 + public int read() throws IOException {
79 + if (mIOException != null) {
80 + throw mIOException;
81 + }
82 + return mPipedInputStream.read();
83 + }
84 +
85 + @Override
86 + public void close() throws IOException {
87 + mInputStream.close();
88 + }
89 +
90 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtsp;
20 +
21 +import java.io.BufferedOutputStream;
22 +import java.io.BufferedReader;
23 +import java.io.IOException;
24 +import java.io.InputStreamReader;
25 +import java.io.OutputStream;
26 +import java.io.UnsupportedEncodingException;
27 +import java.net.Socket;
28 +import java.net.SocketException;
29 +import java.security.MessageDigest;
30 +import java.security.NoSuchAlgorithmException;
31 +import java.util.HashMap;
32 +import java.util.Locale;
33 +import java.util.concurrent.Semaphore;
34 +import java.util.regex.Matcher;
35 +import java.util.regex.Pattern;
36 +import net.majorkernelpanic.streaming.Session;
37 +import net.majorkernelpanic.streaming.Stream;
38 +import net.majorkernelpanic.streaming.rtp.RtpSocket;
39 +import android.os.Handler;
40 +import android.os.HandlerThread;
41 +import android.os.Looper;
42 +import android.util.Log;
43 +
44 +/**
45 + * RFC 2326.
46 + * A basic and asynchronous RTSP client.
47 + * The original purpose of this class was to implement a small RTSP client compatible with Wowza.
48 + * It implements Digest Access Authentication according to RFC 2069.
49 + */
50 +public class RtspClient {
51 +
52 + public final static String TAG = "RtspClient";
53 +
54 + /** Message sent when the connection to the RTSP server failed. */
55 + public final static int ERROR_CONNECTION_FAILED = 0x01;
56 +
57 + /** Message sent when the credentials are wrong. */
58 + public final static int ERROR_WRONG_CREDENTIALS = 0x03;
59 +
60 + /** Use this to use UDP for the transport protocol. */
61 + public final static int TRANSPORT_UDP = RtpSocket.TRANSPORT_UDP;
62 +
63 + /** Use this to use TCP for the transport protocol. */
64 + public final static int TRANSPORT_TCP = RtpSocket.TRANSPORT_TCP;
65 +
66 + /**
67 + * Message sent when the connection with the RTSP server has been lost for
68 + * some reason (for example, the user is going under a bridge).
69 + * When the connection with the server is lost, the client will automatically try to
70 + * reconnect as long as {@link #stopStream()} is not called.
71 + **/
72 + public final static int ERROR_CONNECTION_LOST = 0x04;
73 +
74 + /**
75 + * Message sent when the connection with the RTSP server has been reestablished.
76 + * When the connection with the server is lost, the client will automatically try to
77 + * reconnect as long as {@link #stopStream()} is not called.
78 + */
79 + public final static int MESSAGE_CONNECTION_RECOVERED = 0x05;
80 +
81 + private final static int STATE_STARTED = 0x00;
82 + private final static int STATE_STARTING = 0x01;
83 + private final static int STATE_STOPPING = 0x02;
84 + private final static int STATE_STOPPED = 0x03;
85 + private int mState = 0;
86 +
87 + private class Parameters {
88 + public String host;
89 + public String username;
90 + public String password;
91 + public String path;
92 + public Session session;
93 + public int port;
94 + public int transport;
95 +
96 + public Parameters clone() {
97 + Parameters params = new Parameters();
98 + params.host = host;
99 + params.username = username;
100 + params.password = password;
101 + params.path = path;
102 + params.session = session;
103 + params.port = port;
104 + params.transport = transport;
105 + return params;
106 + }
107 + }
108 +
109 +
110 + private Parameters mTmpParameters;
111 + private Parameters mParameters;
112 +
113 + private int mCSeq;
114 + private Socket mSocket;
115 + private String mSessionID;
116 + private String mAuthorization;
117 + private BufferedReader mBufferedReader;
118 + private OutputStream mOutputStream;
119 + private Callback mCallback;
120 + private Handler mMainHandler;
121 + private Handler mHandler;
122 +
123 + /**
124 + * The callback interface you need to implement to know what's going on with the
125 + * RTSP server (for example your Wowza Media Server).
126 + */
127 + public interface Callback {
128 + public void onRtspUpdate(int message, Exception exception);
129 + }
130 +
	/**
	 * Creates a client with default parameters (port 1935, path "/", UDP transport)
	 * and spins up a dedicated command thread on which all RTSP work is serialized.
	 */
	public RtspClient() {
		mCSeq = 0;
		mTmpParameters = new Parameters();
		mTmpParameters.port = 1935;
		mTmpParameters.path = "/";
		mTmpParameters.transport = TRANSPORT_UDP;
		mAuthorization = null;
		mCallback = null;
		mMainHandler = new Handler(Looper.getMainLooper());
		mState = STATE_STOPPED;

		final Semaphore signal = new Semaphore(0);
		// Dedicated handler thread: all commands (start/stop/monitor) are posted to it.
		new HandlerThread("net.majorkernelpanic.streaming.RtspClient"){
			@Override
			protected void onLooperPrepared() {
				mHandler = new Handler();
				signal.release();
			}
		}.start();
		// Block until mHandler is initialized so public methods can post to it safely.
		signal.acquireUninterruptibly();

	}
153 +
154 + /**
155 + * Sets the callback interface that will be called on status updates of the connection
156 + * with the RTSP server.
157 + * @param cb The implementation of the {@link Callback} interface
158 + */
159 + public void setCallback(Callback cb) {
160 + mCallback = cb;
161 + }
162 +
163 + /**
164 + * The {@link Session} that will be used to stream to the server.
165 + * If not called before {@link #startStream()}, a it will be created.
166 + */
167 + public void setSession(Session session) {
168 + mTmpParameters.session = session;
169 + }
170 +
171 + public Session getSession() {
172 + return mTmpParameters.session;
173 + }
174 +
175 + /**
176 + * Sets the destination address of the RTSP server.
177 + * @param host The destination address
178 + * @param port The destination port
179 + */
180 + public void setServerAddress(String host, int port) {
181 + mTmpParameters.port = port;
182 + mTmpParameters.host = host;
183 + }
184 +
185 + /**
186 + * If authentication is enabled on the server, you need to call this with a valid login/password pair.
187 + * Only implements Digest Access Authentication according to RFC 2069.
188 + * @param username The login
189 + * @param password The password
190 + */
191 + public void setCredentials(String username, String password) {
192 + mTmpParameters.username = username;
193 + mTmpParameters.password = password;
194 + }
195 +
196 + /**
197 + * The path to which the stream will be sent to.
198 + * @param path The path
199 + */
200 + public void setStreamPath(String path) {
201 + mTmpParameters.path = path;
202 + }
203 +
204 + /**
205 + * Call this with {@link #TRANSPORT_TCP} or {@value #TRANSPORT_UDP} to choose the
206 + * transport protocol that will be used to send RTP/RTCP packets.
207 + * Not ready yet !
208 + */
209 + public void setTransportMode(int mode) {
210 + mTmpParameters.transport = mode;
211 + }
212 +
213 + public boolean isStreaming() {
214 + return mState==STATE_STARTED||mState==STATE_STARTING;
215 + }
216 +
217 + /**
218 + * Connects to the RTSP server to publish the stream, and the effectively starts streaming.
219 + * You need to call {@link #setServerAddress(String, int)} and optionally {@link #setSession(Session)}
220 + * and {@link #setCredentials(String, String)} before calling this.
221 + * Should be called of the main thread !
222 + */
223 + public void startStream() {
224 + if (mTmpParameters.host == null) throw new IllegalStateException("setServerAddress(String,int) has not been called !");
225 + if (mTmpParameters.session == null) throw new IllegalStateException("setSession() has not been called !");
226 + mHandler.post(new Runnable () {
227 + @Override
228 + public void run() {
229 + if (mState != STATE_STOPPED) return;
230 + mState = STATE_STARTING;
231 +
232 + Log.d(TAG,"Connecting to RTSP server...");
233 +
234 + // If the user calls some methods to configure the client, it won't modify its behavior until the stream is restarted
235 + mParameters = mTmpParameters.clone();
236 + mParameters.session.setDestination(mTmpParameters.host);
237 +
238 + try {
239 + mParameters.session.syncConfigure();
240 + } catch (Exception e) {
241 + mParameters.session = null;
242 + mState = STATE_STOPPED;
243 + return;
244 + }
245 +
246 + try {
247 + tryConnection();
248 + } catch (Exception e) {
249 + postError(ERROR_CONNECTION_FAILED, e);
250 + abort();
251 + return;
252 + }
253 +
254 + try {
255 + mParameters.session.syncStart();
256 + mState = STATE_STARTED;
257 + if (mParameters.transport == TRANSPORT_UDP) {
258 + mHandler.post(mConnectionMonitor);
259 + }
260 + } catch (Exception e) {
261 + abort();
262 + }
263 +
264 + }
265 + });
266 +
267 + }
268 +
269 + /**
270 + * Stops the stream, and informs the RTSP server.
271 + */
272 + public void stopStream() {
273 + mHandler.post(new Runnable () {
274 + @Override
275 + public void run() {
276 + if (mParameters != null && mParameters.session != null) {
277 + mParameters.session.stop();
278 + }
279 + if (mState != STATE_STOPPED) {
280 + mState = STATE_STOPPING;
281 + abort();
282 + }
283 + }
284 + });
285 + }
286 +
287 + public void release() {
288 + stopStream();
289 + mHandler.getLooper().quit();
290 + }
291 +
292 + private void abort() {
293 + try {
294 + sendRequestTeardown();
295 + } catch (Exception ignore) {}
296 + try {
297 + mSocket.close();
298 + } catch (Exception ignore) {}
299 + mHandler.removeCallbacks(mConnectionMonitor);
300 + mHandler.removeCallbacks(mRetryConnection);
301 + mState = STATE_STOPPED;
302 + }
303 +
304 + private void tryConnection() throws IOException {
305 + mCSeq = 0;
306 + mSocket = new Socket(mParameters.host, mParameters.port);
307 + mBufferedReader = new BufferedReader(new InputStreamReader(mSocket.getInputStream()));
308 + mOutputStream = new BufferedOutputStream(mSocket.getOutputStream());
309 + sendRequestAnnounce();
310 + sendRequestSetup();
311 + Log.i("tryConnection : ", "rec");
312 +
313 + sendRequestRecord();
314 + }
315 +
316 + /**
317 + * Forges and sends the ANNOUNCE request
318 + */
319 + private void sendRequestAnnounce() throws IllegalStateException, SocketException, IOException {
320 + String body = mParameters.session.getSessionDescription();
321 + String request = "ANNOUNCE rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+" RTSP/1.0\r\n" +
322 + "CSeq: " + (++mCSeq) + "\r\n" +
323 + "Content-Length: " + body.length() + "\r\n" +
324 + "Content-Type: application/sdp\r\n\r\n" +
325 + body;
326 +
327 + mOutputStream.write(request.getBytes("UTF-8"));
328 + mOutputStream.flush();
329 +
330 + }
331 +
332 + /**
333 + * Forges and sends the SETUP request
334 + */
335 + private void sendRequestSetup() throws IllegalStateException, SocketException, IOException {
336 + // audio와 video session을 가져옴
337 + for (int i=0;i<2;i++) {
338 + Stream stream = mParameters.session.getTrack(i);
339 + if (stream != null) {
340 + String params = mParameters.transport==TRANSPORT_TCP ?
341 + ("TCP;interleaved="+2*i+"-"+(2*i+1)) : ("UDP;unicast;client_port="+(5000+2*i)+"-"+(5000+2*i+1)+";mode=receive");
342 + String request = "SETUP rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+"/trackID="+i+" RTSP/1.0\r\n" +
343 + "Transport: RTP/AVP/"+params+"\r\n" +
344 + addHeaders();
345 +
346 + Log.i(TAG,request.substring(0, request.indexOf("\r\n")));
347 +
348 + mOutputStream.write(request.getBytes("UTF-8"));
349 + mOutputStream.flush();
350 +
351 + }
352 + }
353 + }
354 +
355 + /**
356 + * Forges and sends the RECORD request
357 + */
358 + private void sendRequestRecord() throws IllegalStateException, SocketException, IOException {
359 + String request = "RECORD rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+" RTSP/1.0\r\n" +
360 + "Range: npt=0.000-\r\n" +
361 + addHeaders();
362 + Log.i(TAG,request.substring(0, request.indexOf("\r\n")));
363 + mOutputStream.write(request.getBytes("UTF-8"));
364 + mOutputStream.flush();
365 +
366 + }
367 +
368 + /**
369 + * Forges and sends the TEARDOWN request
370 + */
371 + private void sendRequestTeardown() throws IOException {
372 + String request = "TEARDOWN rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+" RTSP/1.0\r\n" + addHeaders();
373 + Log.i(TAG,request.substring(0, request.indexOf("\r\n")));
374 + mOutputStream.write(request.getBytes("UTF-8"));
375 + mOutputStream.flush();
376 + }
377 +
378 + /**
379 + * Forges and sends the OPTIONS request
380 + */
381 + private void sendRequestOption() throws IOException {
382 + String request = "OPTIONS rtsp://"+mParameters.host+":"+mParameters.port+mParameters.path+" RTSP/1.0\r\n" + addHeaders();
383 + Log.i(TAG,request.substring(0, request.indexOf("\r\n")));
384 + mOutputStream.write(request.getBytes("UTF-8"));
385 + mOutputStream.flush();
386 + Response.parseResponse(mBufferedReader);
387 + }
388 +
	/** Builds the common trailing headers (CSeq, Content-Length, Session, optional
	 * Authorization) terminated by the blank line that ends an RTSP request. */
	private String addHeaders() {
		return "CSeq: " + (++mCSeq) + "\r\n" +
				"Content-Length: 0\r\n" +
				"Session: " + mSessionID + "\r\n" +
				// For some reason you may have to remove last "\r\n" in the next line to make the RTSP client work with your wowza server :/
				(mAuthorization != null ? "Authorization: " + mAuthorization + "\r\n":"") + "\r\n";
	}
396 +
397 + /**
398 + * If the connection with the RTSP server is lost, we try to reconnect to it as
399 + * long as {@link #stopStream()} is not called.
400 + */
401 + private Runnable mConnectionMonitor = new Runnable() {
402 + @Override
403 + public void run() {
404 + if (mState == STATE_STARTED) {
405 + try {
406 + // We poll the RTSP server with OPTION requests
407 + sendRequestOption();
408 + mHandler.postDelayed(mConnectionMonitor, 6000);
409 + } catch (IOException e) {
410 + // Happens if the OPTION request fails
411 + postMessage(ERROR_CONNECTION_LOST);
412 + Log.e(TAG, "Connection lost with the server...");
413 + mParameters.session.stop();
414 + mHandler.post(mRetryConnection);
415 + }
416 + }
417 + }
418 + };
419 +
420 + /** Here, we try to reconnect to the RTSP. */
421 + private Runnable mRetryConnection = new Runnable() {
422 + @Override
423 + public void run() {
424 + if (mState == STATE_STARTED) {
425 + try {
426 + Log.e(TAG, "Trying to reconnect...");
427 + tryConnection();
428 + try {
429 + mParameters.session.start();
430 + mHandler.post(mConnectionMonitor);
431 + postMessage(MESSAGE_CONNECTION_RECOVERED);
432 + } catch (Exception e) {
433 + abort();
434 + }
435 + } catch (IOException e) {
436 + mHandler.postDelayed(mRetryConnection,1000);
437 + }
438 + }
439 + }
440 + };
441 +
442 + final protected static char[] hexArray = {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
443 +
444 + private static String bytesToHex(byte[] bytes) {
445 + char[] hexChars = new char[bytes.length * 2];
446 + int v;
447 + for ( int j = 0; j < bytes.length; j++ ) {
448 + v = bytes[j] & 0xFF;
449 + hexChars[j * 2] = hexArray[v >>> 4];
450 + hexChars[j * 2 + 1] = hexArray[v & 0x0F];
451 + }
452 + return new String(hexChars);
453 + }
454 +
455 + /** Needed for the Digest Access Authentication. */
456 + private String computeMd5Hash(String buffer) {
457 + MessageDigest md;
458 + try {
459 + md = MessageDigest.getInstance("MD5");
460 + return bytesToHex(md.digest(buffer.getBytes("UTF-8")));
461 + } catch (NoSuchAlgorithmException ignore) {
462 + } catch (UnsupportedEncodingException e) {}
463 + return "";
464 + }
465 +
466 + private void postMessage(final int message) {
467 + mMainHandler.post(new Runnable() {
468 + @Override
469 + public void run() {
470 + if (mCallback != null) {
471 + mCallback.onRtspUpdate(message, null);
472 + }
473 + }
474 + });
475 + }
476 +
477 + private void postError(final int message, final Exception e) {
478 + mMainHandler.post(new Runnable() {
479 + @Override
480 + public void run() {
481 + if (mCallback != null) {
482 + mCallback.onRtspUpdate(message, e);
483 + }
484 + }
485 + });
486 + }
487 +
488 + static class Response {
489 +
490 + // Parses method & uri
491 + public static final Pattern regexStatus = Pattern.compile("RTSP/\\d.\\d (\\d+) (\\w+)",Pattern.CASE_INSENSITIVE);
492 + // Parses a request header
493 + public static final Pattern rexegHeader = Pattern.compile("(\\S+):(.+)",Pattern.CASE_INSENSITIVE);
494 + // Parses a WWW-Authenticate header
495 + public static final Pattern rexegAuthenticate = Pattern.compile("realm=\"(.+)\",\\s+nonce=\"(\\w+)\"",Pattern.CASE_INSENSITIVE);
496 + // Parses a Session header
497 + public static final Pattern rexegSession = Pattern.compile("(\\d+)",Pattern.CASE_INSENSITIVE);
498 + // Parses a Transport header
499 + public static final Pattern rexegTransport = Pattern.compile("client_port=(\\d+)-(\\d+).+server_port=(\\d+)-(\\d+)",Pattern.CASE_INSENSITIVE);
500 +
501 +
502 + public int status;
503 + public HashMap<String,String> headers = new HashMap<>();
504 +
505 + /** Parse the method, URI & headers of a RTSP request */
506 + public static Response parseResponse(BufferedReader input) throws IOException, IllegalStateException, SocketException {
507 + Response response = new Response();
508 + String line;
509 + Matcher matcher;
510 + // Parsing request method & URI
511 + if ((line = input.readLine())==null) throw new SocketException("Connection lost");
512 + Log.i("parseResponse", regexStatus.matcher(line).toString());
513 + matcher = regexStatus.matcher(line);
514 + matcher.find();
515 + response.status = Integer.parseInt(matcher.group(1));
516 +
517 + // Parsing headers of the request
518 + while ( (line = input.readLine()) != null) {
519 + //Log.e(TAG,"l: "+line.length()+", c: "+line);
520 + if (line.length()>3) {
521 + matcher = rexegHeader.matcher(line);
522 + matcher.find();
523 + response.headers.put(matcher.group(1).toLowerCase(Locale.US),matcher.group(2));
524 + } else {
525 + break;
526 + }
527 + }
528 + if (line==null) throw new SocketException("Connection lost");
529 +
530 + Log.d(TAG, "Response from server: "+response.status);
531 +
532 + return response;
533 + }
534 + }
535 +
536 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtsp;
20 +
21 +import java.io.BufferedReader;
22 +import java.io.IOException;
23 +import java.io.InputStreamReader;
24 +import java.io.OutputStream;
25 +import java.net.BindException;
26 +import java.net.InetAddress;
27 +import java.net.ServerSocket;
28 +import java.net.Socket;
29 +import java.net.SocketException;
30 +import java.util.HashMap;
31 +import java.util.LinkedList;
32 +import java.util.Locale;
33 +import java.util.WeakHashMap;
34 +import java.util.regex.Matcher;
35 +import java.util.regex.Pattern;
36 +import net.majorkernelpanic.streaming.Session;
37 +import net.majorkernelpanic.streaming.SessionBuilder;
38 +import android.app.Service;
39 +import android.content.Intent;
40 +import android.content.SharedPreferences;
41 +import android.content.SharedPreferences.Editor;
42 +import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
43 +import android.os.Binder;
44 +import android.os.IBinder;
45 +import android.preference.PreferenceManager;
46 +import android.util.Base64;
47 +import android.util.Log;
48 +
49 +/**
50 + * Implementation of a subset of the RTSP protocol (RFC 2326).
51 + *
52 + * It allows remote control of an android device cameras & microphone.
53 + * For each connected client, a Session is instantiated.
54 + * The Session will start or stop streams according to what the client wants.
55 + *
56 + */
57 +public class RtspServer extends Service {
58 +
59 + public final static String TAG = "RtspServer";
60 +
61 + /** The server name that will appear in responses. */
62 + public static String SERVER_NAME = "MajorKernelPanic RTSP Server";
63 +
64 + /** Port used by default. */
65 + public static final int DEFAULT_RTSP_PORT = 8086;
66 +
67 + /** Port already in use. */
68 + public final static int ERROR_BIND_FAILED = 0x00;
69 +
70 + /** A stream could not be started. */
71 + public final static int ERROR_START_FAILED = 0x01;
72 +
73 + /** Streaming started. */
74 + public final static int MESSAGE_STREAMING_STARTED = 0X00;
75 +
76 + /** Streaming stopped. */
77 + public final static int MESSAGE_STREAMING_STOPPED = 0X01;
78 +
79 + /** Key used in the SharedPreferences to store whether the RTSP server is enabled or not. */
80 + public final static String KEY_ENABLED = "rtsp_enabled";
81 +
82 + /** Key used in the SharedPreferences for the port used by the RTSP server. */
83 + public final static String KEY_PORT = "rtsp_port";
84 +
85 + protected SessionBuilder mSessionBuilder;
86 + protected SharedPreferences mSharedPreferences;
87 + protected boolean mEnabled = true;
88 + protected int mPort = DEFAULT_RTSP_PORT;
89 + protected WeakHashMap<Session,Object> mSessions = new WeakHashMap<>(2);
90 +
91 + private RequestListener mListenerThread;
92 + private final IBinder mBinder = new LocalBinder();
93 + private boolean mRestart = false;
94 + private final LinkedList<CallbackListener> mListeners = new LinkedList<>();
95 +
96 + /** Credentials for Basic Auth */
97 + private String mUsername;
98 + private String mPassword;
99 +
100 +
	// Default constructor: Android instantiates the Service through it.
	public RtspServer() {
	}

	/** Be careful: those callbacks won't necessarily be called from the ui thread ! */
	public interface CallbackListener {

		/** Called when an error occurs. */
		void onError(RtspServer server, Exception e, int error);

		/** Called when streaming starts/stops. */
		void onMessage(RtspServer server, int message);

	}
114 +
115 + /**
116 + * See {@link RtspServer.CallbackListener} to check out what events will be fired once you set up a listener.
117 + * @param listener The listener
118 + */
119 + public void addCallbackListener(CallbackListener listener) {
120 + synchronized (mListeners) {
121 + if (!mListeners.isEmpty()) {
122 + for (CallbackListener cl : mListeners) {
123 + if (cl == listener) return;
124 + }
125 + }
126 + mListeners.add(listener);
127 + }
128 + }
129 +
130 + /**
131 + * Removes the listener.
132 + * @param listener The listener
133 + */
134 + public void removeCallbackListener(CallbackListener listener) {
135 + synchronized (mListeners) {
136 + mListeners.remove(listener);
137 + }
138 + }
139 +
140 + /** Returns the port used by the RTSP server. */
141 + public int getPort() {
142 + return mPort;
143 + }
144 +
145 + /**
146 + * Sets the port for the RTSP server to use.
147 + * @param port The port
148 + */
149 + public void setPort(int port) {
150 + Editor editor = mSharedPreferences.edit();
151 + editor.putString(KEY_PORT, String.valueOf(port));
152 + editor.commit();
153 + }
154 +
155 + /**
156 + * Set Basic authorization to access RTSP Stream
157 + * @param username username
158 + * @param password password
159 + */
160 + public void setAuthorization(String username, String password)
161 + {
162 + mUsername = username;
163 + mPassword = password;
164 + }
165 +
166 + /**
167 + * Starts (or restart if needed, if for example the configuration
168 + * of the server has been modified) the RTSP server.
169 + */
170 + public void start() {
171 + if (!mEnabled || mRestart) stop();
172 + if (mEnabled && mListenerThread == null) {
173 + try {
174 + mListenerThread = new RequestListener();
175 + } catch (Exception e) {
176 + mListenerThread = null;
177 + }
178 + }
179 + mRestart = false;
180 + }
181 +
182 + /**
183 + * Stops the RTSP server but not the Android Service.
184 + * To stop the Android Service you need to call {@link android.content.Context#stopService(Intent)};
185 + */
186 + public void stop() {
187 + if (mListenerThread != null) {
188 + try {
189 + mListenerThread.kill();
190 + for ( Session session : mSessions.keySet() ) {
191 + if ( session != null && session.isStreaming() ) {
192 + session.stop();
193 + }
194 + }
195 + } catch (Exception e) {
196 + } finally {
197 + mListenerThread = null;
198 + }
199 + }
200 + }
201 +
202 + /** Returns whether or not the RTSP server is streaming to some client(s). */
203 + public boolean isStreaming() {
204 + for ( Session session : mSessions.keySet() ) {
205 + if ( session != null && session.isStreaming() ) {
206 + return true;
207 + }
208 + }
209 + return false;
210 + }
211 +
212 + public boolean isEnabled() {
213 + return mEnabled;
214 + }
215 +
216 + /** Returns the bandwidth consumed by the RTSP server in bits per second. */
217 + public long getBitrate() {
218 + long bitrate = 0;
219 + for ( Session session : mSessions.keySet() ) {
220 + if ( session != null && session.isStreaming() ) {
221 + bitrate += session.getBitrate();
222 + }
223 + }
224 + return bitrate;
225 + }
226 +
227 + @Override
228 + public int onStartCommand(Intent intent, int flags, int startId) {
229 + return START_STICKY;
230 + }
231 +
232 + @Override
233 + public void onCreate() {
234 +
235 + // Let's restore the state of the service
236 + mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
237 + mPort = Integer.parseInt(mSharedPreferences.getString(KEY_PORT, String.valueOf(mPort)));
238 + mEnabled = mSharedPreferences.getBoolean(KEY_ENABLED, mEnabled);
239 +
240 + // If the configuration is modified, the server will adjust
241 + mSharedPreferences.registerOnSharedPreferenceChangeListener(mOnSharedPreferenceChangeListener);
242 +
243 + start();
244 + }
245 +
    /** Stops the listener thread and unregisters the preference-change callback. */
    @Override
    public void onDestroy() {
        stop();
        mSharedPreferences.unregisterOnSharedPreferenceChangeListener(mOnSharedPreferenceChangeListener);
    }
251 +
252 + private OnSharedPreferenceChangeListener mOnSharedPreferenceChangeListener = new OnSharedPreferenceChangeListener() {
253 + @Override
254 + public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
255 +
256 + if (key.equals(KEY_PORT)) {
257 + int port = Integer.parseInt(sharedPreferences.getString(KEY_PORT, String.valueOf(mPort)));
258 + if (port != mPort) {
259 + mPort = port;
260 + mRestart = true;
261 + start();
262 + }
263 + }
264 + else if (key.equals(KEY_ENABLED)) {
265 + mEnabled = sharedPreferences.getBoolean(KEY_ENABLED, mEnabled);
266 + start();
267 + }
268 + }
269 + };
270 +
    /** The Binder you obtain when a connection with the Service is established. */
    public class LocalBinder extends Binder {
        /** Returns the bound {@link RtspServer} instance. */
        public RtspServer getService() {
            return RtspServer.this;
        }
    }
277 +
    /** Clients bind to retrieve the RtspServer instance through {@link LocalBinder}. */
    @Override
    public IBinder onBind(Intent intent) {
        return mBinder;
    }
282 +
283 + protected void postMessage(int id) {
284 + synchronized (mListeners) {
285 + if (!mListeners.isEmpty()) {
286 + for (CallbackListener cl : mListeners) {
287 + cl.onMessage(this, id);
288 + }
289 + }
290 + }
291 + }
292 +
293 + protected void postError(Exception exception, int id) {
294 + synchronized (mListeners) {
295 + if (!mListeners.isEmpty()) {
296 + for (CallbackListener cl : mListeners) {
297 + cl.onError(this, exception, id);
298 + }
299 + }
300 + }
301 + }
302 +
    /**
     * By default the RTSP uses {@link UriParser} to parse the URI requested by the client
     * but you can change that behavior by override this method.
     * @param uri The uri that the client has requested
     * @param client The socket associated to the client
     * @return A proper session
     */
    protected Session handleRequest(String uri, Socket client) throws IllegalStateException, IOException {
        Session session = UriParser.parse(uri);
        // Origin = our own address; destination defaults to the client's address
        // unless the URI specified one (e.g. a multicast group).
        session.setOrigin(client.getLocalAddress().getHostAddress());
        if (session.getDestination()==null) {
            session.setDestination(client.getInetAddress().getHostAddress());
        }
        return session;
    }
318 +
    /**
     * Accepts incoming connections on {@link #mPort} and spawns one WorkerThread
     * per client. NOTE(review): the thread starts itself from its constructor;
     * a BindException is reported through postError() and rethrown so that
     * start() can reset mListenerThread.
     */
    class RequestListener extends Thread implements Runnable {

        private final ServerSocket mServer;

        public RequestListener() throws IOException {
            try {
                mServer = new ServerSocket(mPort);
                start();
            } catch (BindException e) {
                Log.e(TAG,"Port already in use !");
                postError(e, ERROR_BIND_FAILED);
                throw e;
            }
        }

        public void run() {
            Log.i(TAG,"RTSP server listening on port "+mServer.getLocalPort());
            while (!Thread.interrupted()) {
                try {
                    new WorkerThread(mServer.accept()).start();
                } catch (SocketException e) {
                    // The server socket was closed by kill(): leave the accept loop.
                    break;
                } catch (IOException e) {
                    Log.e(TAG,e.getMessage());
                    continue;
                }
            }
            Log.i(TAG,"RTSP server stopped !");
        }

        public void kill() {
            // Closing the socket makes accept() throw a SocketException, ending run(),
            // then we wait for the thread to terminate.
            try {
                mServer.close();
            } catch (IOException e) {}
            try {
                this.join();
            } catch (InterruptedException ignore) {}
        }

    }
359 +
360 + // One thread per client
361 + class WorkerThread extends Thread implements Runnable {
362 +
363 + private final Socket mClient;
364 + private final OutputStream mOutput;
365 + private final BufferedReader mInput;
366 +
367 + // Each client has an associated session
368 + private Session mSession;
369 +
370 + public WorkerThread(final Socket client) throws IOException {
371 + mInput = new BufferedReader(new InputStreamReader(client.getInputStream()));
372 + mOutput = client.getOutputStream();
373 + mClient = client;
374 + mSession = new Session();
375 + }
376 +
377 + public void run() {
378 + Request request;
379 + Response response;
380 +
381 + Log.i(TAG, "Connection from "+mClient.getInetAddress().getHostAddress());
382 +
383 + while (!Thread.interrupted()) {
384 +
385 + request = null;
386 + response = null;
387 +
388 + // Parse the request
389 + try {
390 + request = Request.parseRequest(mInput);
391 + } catch (SocketException e) {
392 + // Client has left
393 + break;
394 + } catch (Exception e) {
395 + // We don't understand the request :/
396 + response = new Response();
397 + response.status = Response.STATUS_BAD_REQUEST;
398 + }
399 +
400 + // Do something accordingly like starting the streams, sending a session description
401 + if (request != null) {
402 + try {
403 + response = processRequest(request);
404 + }
405 + catch (Exception e) {
406 + // This alerts the main thread that something has gone wrong in this thread
407 + postError(e, ERROR_START_FAILED);
408 + Log.e(TAG,e.getMessage()!=null?e.getMessage():"An error occurred");
409 + e.printStackTrace();
410 + response = new Response(request);
411 + }
412 + }
413 +
414 + // We always send a response
415 + // The client will receive an "INTERNAL SERVER ERROR" if an exception has been thrown at some point
416 + try {
417 + response.send(mOutput);
418 + } catch (IOException e) {
419 + Log.e(TAG,"Response was not sent properly");
420 + break;
421 + }
422 +
423 + }
424 +
425 + // Streaming stops when client disconnects
426 + boolean streaming = isStreaming();
427 + mSession.syncStop();
428 + if (streaming && !isStreaming()) {
429 + postMessage(MESSAGE_STREAMING_STOPPED);
430 + }
431 + mSession.release();
432 +
433 + try {
434 + mClient.close();
435 + } catch (IOException ignore) {}
436 +
437 + Log.i(TAG, "Client disconnected");
438 +
439 + }
440 +
441 + public Response processRequest(Request request) throws IllegalStateException, IOException {
442 + Response response = new Response(request);
443 +
444 + //Ask for authorization unless this is an OPTIONS request
445 + if(!isAuthorized(request) && !request.method.equalsIgnoreCase("OPTIONS"))
446 + {
447 + response.attributes = "WWW-Authenticate: Basic realm=\""+SERVER_NAME+"\"\r\n";
448 + response.status = Response.STATUS_UNAUTHORIZED;
449 + }
450 + else
451 + {
452 + /* ********************************************************************************** */
453 + /* ********************************* Method DESCRIBE ******************************** */
454 + /* ********************************************************************************** */
455 + if (request.method.equalsIgnoreCase("DESCRIBE")) {
456 +
457 + // Parse the requested URI and configure the session
458 + mSession = handleRequest(request.uri, mClient);
459 + mSessions.put(mSession, null);
460 + mSession.syncConfigure();
461 +
462 + String requestContent = mSession.getSessionDescription();
463 + String requestAttributes =
464 + "Content-Base: " + mClient.getLocalAddress().getHostAddress() + ":" + mClient.getLocalPort() + "/\r\n" +
465 + "Content-Type: application/sdp\r\n";
466 +
467 + response.attributes = requestAttributes;
468 + response.content = requestContent;
469 +
470 + // If no exception has been thrown, we reply with OK
471 + response.status = Response.STATUS_OK;
472 +
473 + }
474 +
475 + /* ********************************************************************************** */
476 + /* ********************************* Method OPTIONS ********************************* */
477 + /* ********************************************************************************** */
478 + else if (request.method.equalsIgnoreCase("OPTIONS")) {
479 + response.status = Response.STATUS_OK;
480 + response.attributes = "Public: DESCRIBE,SETUP,TEARDOWN,PLAY,PAUSE\r\n";
481 + response.status = Response.STATUS_OK;
482 + }
483 +
484 + /* ********************************************************************************** */
485 + /* ********************************** Method SETUP ********************************** */
486 + /* ********************************************************************************** */
487 + else if (request.method.equalsIgnoreCase("SETUP")) {
488 + Pattern p;
489 + Matcher m;
490 + int p2, p1, ssrc, trackId, src[];
491 + String destination;
492 +
493 + p = Pattern.compile("trackID=(\\w+)", Pattern.CASE_INSENSITIVE);
494 + m = p.matcher(request.uri);
495 +
496 + if (!m.find()) {
497 + response.status = Response.STATUS_BAD_REQUEST;
498 + return response;
499 + }
500 +
501 + trackId = Integer.parseInt(m.group(1));
502 +
503 + if (!mSession.trackExists(trackId)) {
504 + response.status = Response.STATUS_NOT_FOUND;
505 + return response;
506 + }
507 +
508 + p = Pattern.compile("client_port=(\\d+)(?:-(\\d+))?", Pattern.CASE_INSENSITIVE);
509 + m = p.matcher(request.headers.get("transport"));
510 +
511 + if (!m.find()) {
512 + int[] ports = mSession.getTrack(trackId).getDestinationPorts();
513 + p1 = ports[0];
514 + p2 = ports[1];
515 + } else {
516 + p1 = Integer.parseInt(m.group(1));
517 + if (m.group(2) == null) {
518 + p2 = p1+1;
519 + } else {
520 + p2 = Integer.parseInt(m.group(2));
521 + }
522 + }
523 +
524 + ssrc = mSession.getTrack(trackId).getSSRC();
525 + src = mSession.getTrack(trackId).getLocalPorts();
526 + destination = mSession.getDestination();
527 +
528 + mSession.getTrack(trackId).setDestinationPorts(p1, p2);
529 +
530 + boolean streaming = isStreaming();
531 + mSession.syncStart(trackId);
532 + if (!streaming && isStreaming()) {
533 + postMessage(MESSAGE_STREAMING_STARTED);
534 + }
535 +
536 + response.attributes = "Transport: RTP/AVP/UDP;" + (InetAddress.getByName(destination).isMulticastAddress() ? "multicast" : "unicast") +
537 + ";destination=" + mSession.getDestination() +
538 + ";client_port=" + p1 + "-" + p2 +
539 + ";server_port=" + src[0] + "-" + src[1] +
540 + ";ssrc=" + Integer.toHexString(ssrc) +
541 + ";mode=play\r\n" +
542 + "Session: " + "1185d20035702ca" + "\r\n" +
543 + "Cache-Control: no-cache\r\n";
544 + response.status = Response.STATUS_OK;
545 +
546 + // If no exception has been thrown, we reply with OK
547 + response.status = Response.STATUS_OK;
548 +
549 + }
550 +
551 + /* ********************************************************************************** */
552 + /* ********************************** Method PLAY *********************************** */
553 + /* ********************************************************************************** */
554 + else if (request.method.equalsIgnoreCase("PLAY")) {
555 + String requestAttributes = "RTP-Info: ";
556 + if (mSession.trackExists(0))
557 + requestAttributes += "url=rtsp://" + mClient.getLocalAddress().getHostAddress() + ":" + mClient.getLocalPort() + "/trackID=" + 0 + ";seq=0,";
558 + if (mSession.trackExists(1))
559 + requestAttributes += "url=rtsp://" + mClient.getLocalAddress().getHostAddress() + ":" + mClient.getLocalPort() + "/trackID=" + 1 + ";seq=0,";
560 + requestAttributes = requestAttributes.substring(0, requestAttributes.length() - 1) + "\r\nSession: 1185d20035702ca\r\n";
561 +
562 + response.attributes = requestAttributes;
563 +
564 + // If no exception has been thrown, we reply with OK
565 + response.status = Response.STATUS_OK;
566 +
567 + }
568 +
569 + /* ********************************************************************************** */
570 + /* ********************************** Method PAUSE ********************************** */
571 + /* ********************************************************************************** */
572 + else if (request.method.equalsIgnoreCase("PAUSE")) {
573 + response.status = Response.STATUS_OK;
574 + }
575 +
576 + /* ********************************************************************************** */
577 + /* ********************************* Method TEARDOWN ******************************** */
578 + /* ********************************************************************************** */
579 + else if (request.method.equalsIgnoreCase("TEARDOWN")) {
580 + response.status = Response.STATUS_OK;
581 + }
582 +
583 + /* ********************************************************************************** */
584 + /* ********************************* Unknown method ? ******************************* */
585 + /* ********************************************************************************** */
586 + else {
587 + Log.e(TAG, "Command unknown: " + request);
588 + response.status = Response.STATUS_BAD_REQUEST;
589 + }
590 + }
591 + return response;
592 +
593 + }
594 +
595 + /**
596 + * Check if the request is authorized
597 + * @param request
598 + * @return true or false
599 + */
600 + private boolean isAuthorized(Request request)
601 + {
602 + String auth = request.headers.get("authorization");
603 + if(mUsername == null || mPassword == null || mUsername.isEmpty())
604 + return true;
605 +
606 + if(auth != null && !auth.isEmpty())
607 + {
608 + String received = auth.substring(auth.lastIndexOf(" ")+1);
609 + String local = mUsername+":"+mPassword;
610 + String localEncoded = Base64.encodeToString(local.getBytes(),Base64.NO_WRAP);
611 + if(localEncoded.equals(received))
612 + return true;
613 + }
614 +
615 + return false;
616 + }
617 + }
618 +
619 + static class Request {
620 +
621 + // Parse method & uri
622 + public static final Pattern regexMethod = Pattern.compile("(\\w+) (\\S+) RTSP",Pattern.CASE_INSENSITIVE);
623 + // Parse a request header
624 + public static final Pattern rexegHeader = Pattern.compile("(\\S+):(.+)",Pattern.CASE_INSENSITIVE);
625 +
626 + public String method;
627 + public String uri;
628 + public HashMap<String,String> headers = new HashMap<>();
629 +
630 + /** Parse the method, uri & headers of a RTSP request */
631 + public static Request parseRequest(BufferedReader input) throws IOException, IllegalStateException, SocketException {
632 + Request request = new Request();
633 + String line;
634 + Matcher matcher;
635 +
636 + // Parsing request method & uri
637 + if ((line = input.readLine())==null) throw new SocketException("Client disconnected");
638 + matcher = regexMethod.matcher(line);
639 + matcher.find();
640 + request.method = matcher.group(1);
641 + request.uri = matcher.group(2);
642 +
643 + // Parsing headers of the request
644 + while ( (line = input.readLine()) != null && line.length()>3 ) {
645 + matcher = rexegHeader.matcher(line);
646 + matcher.find();
647 + request.headers.put(matcher.group(1).toLowerCase(Locale.US),matcher.group(2));
648 + }
649 + if (line==null) throw new SocketException("Client disconnected");
650 +
651 + // It's not an error, it's just easier to follow what's happening in logcat with the request in red
652 + Log.e(TAG,request.method+" "+request.uri);
653 +
654 + return request;
655 + }
656 + }
657 +
658 + static class Response {
659 +
660 + // Status code definitions
661 + public static final String STATUS_OK = "200 OK";
662 + public static final String STATUS_BAD_REQUEST = "400 Bad Request";
663 + public static final String STATUS_UNAUTHORIZED = "401 Unauthorized";
664 + public static final String STATUS_NOT_FOUND = "404 Not Found";
665 + public static final String STATUS_INTERNAL_SERVER_ERROR = "500 Internal Server Error";
666 +
667 + public String status = STATUS_INTERNAL_SERVER_ERROR;
668 + public String content = "";
669 + public String attributes = "";
670 +
671 + private final Request mRequest;
672 +
673 + public Response(Request request) {
674 + this.mRequest = request;
675 + }
676 +
677 + public Response() {
678 + // Be carefull if you modify the send() method because request might be null !
679 + mRequest = null;
680 + }
681 +
682 + public void send(OutputStream output) throws IOException {
683 + int seqid = -1;
684 +
685 + try {
686 + seqid = Integer.parseInt(mRequest.headers.get("cseq").replace(" ",""));
687 + } catch (Exception e) {
688 + Log.e(TAG,"Error parsing CSeq: "+(e.getMessage()!=null?e.getMessage():""));
689 + }
690 +
691 + String response = "RTSP/1.0 "+status+"\r\n" +
692 + "Server: "+SERVER_NAME+"\r\n" +
693 + (seqid>=0?("Cseq: " + seqid + "\r\n"):"") +
694 + "Content-Length: " + content.length() + "\r\n" +
695 + attributes +
696 + "\r\n" +
697 + content;
698 +
699 + Log.d(TAG,response.replace("\r", ""));
700 +
701 + output.write(response.getBytes());
702 + }
703 + }
704 +
705 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.rtsp;
20 +
21 +import static net.majorkernelpanic.streaming.SessionBuilder.AUDIO_AAC;
22 +import static net.majorkernelpanic.streaming.SessionBuilder.AUDIO_AMRNB;
23 +import static net.majorkernelpanic.streaming.SessionBuilder.AUDIO_NONE;
24 +import static net.majorkernelpanic.streaming.SessionBuilder.VIDEO_H263;
25 +import static net.majorkernelpanic.streaming.SessionBuilder.VIDEO_H264;
26 +import static net.majorkernelpanic.streaming.SessionBuilder.VIDEO_NONE;
27 +
28 +import java.io.IOException;
29 +import java.net.InetAddress;
30 +import java.net.URI;
31 +import java.net.URLEncoder;
32 +import java.net.UnknownHostException;
33 +import java.util.Set;
34 +import net.majorkernelpanic.streaming.MediaStream;
35 +import net.majorkernelpanic.streaming.Session;
36 +import net.majorkernelpanic.streaming.SessionBuilder;
37 +import net.majorkernelpanic.streaming.audio.AudioQuality;
38 +import net.majorkernelpanic.streaming.video.VideoQuality;
39 +
40 +import android.content.ContentValues;
41 +import android.hardware.Camera.CameraInfo;
42 +
43 +/**
44 + * This class parses URIs received by the RTSP server and configures a Session accordingly.
45 + */
46 +public class UriParser {
47 +
48 + public final static String TAG = "UriParser";
49 +
50 + /**
51 + * Configures a Session according to the given URI.
52 + * Here are some examples of URIs that can be used to configure a Session:
53 + * <ul><li>rtsp://xxx.xxx.xxx.xxx:8086?h264&flash=on</li>
54 + * <li>rtsp://xxx.xxx.xxx.xxx:8086?h263&camera=front&flash=on</li>
55 + * <li>rtsp://xxx.xxx.xxx.xxx:8086?h264=200-20-320-240</li>
56 + * <li>rtsp://xxx.xxx.xxx.xxx:8086?aac</li></ul>
57 + * @param uri The URI
58 + * @throws IllegalStateException
59 + * @throws IOException
60 + * @return A Session configured according to the URI
61 + */
62 + public static Session parse(String uri) throws IllegalStateException, IOException {
63 + SessionBuilder builder = SessionBuilder.getInstance().clone();
64 + byte audioApi = 0, videoApi = 0;
65 +
66 + String query = URI.create(uri).getQuery();
67 + String[] queryParams = query == null ? new String[0] : query.split("&");
68 + ContentValues params = new ContentValues();
69 + for(String param:queryParams)
70 + {
71 + String[] keyValue = param.split("=");
72 + String value = "";
73 + try {
74 + value = keyValue[1];
75 + }catch(ArrayIndexOutOfBoundsException e){}
76 +
77 + params.put(
78 + URLEncoder.encode(keyValue[0], "UTF-8"), // Name
79 + URLEncoder.encode(value, "UTF-8") // Value
80 + );
81 +
82 + }
83 +
84 + if (params.size()>0) {
85 +
86 + builder.setAudioEncoder(AUDIO_NONE).setVideoEncoder(VIDEO_NONE);
87 + Set<String> paramKeys=params.keySet();
88 + // Those parameters must be parsed first or else they won't necessarily be taken into account
89 + for(String paramName: paramKeys) {
90 + String paramValue = params.getAsString(paramName);
91 +
92 + // FLASH ON/OFF
93 + if (paramName.equalsIgnoreCase("flash")) {
94 + if (paramValue.equalsIgnoreCase("on"))
95 + builder.setFlashEnabled(true);
96 + else
97 + builder.setFlashEnabled(false);
98 + }
99 +
100 + // CAMERA -> the client can choose between the front facing camera and the back facing camera
101 + else if (paramName.equalsIgnoreCase("camera")) {
102 + if (paramValue.equalsIgnoreCase("back"))
103 + builder.setCamera(CameraInfo.CAMERA_FACING_BACK);
104 + else if (paramValue.equalsIgnoreCase("front"))
105 + builder.setCamera(CameraInfo.CAMERA_FACING_FRONT);
106 + }
107 +
108 + // MULTICAST -> the stream will be sent to a multicast group
109 + // The default mutlicast address is 228.5.6.7, but the client can specify another
110 + else if (paramName.equalsIgnoreCase("multicast")) {
111 + if (paramValue!=null) {
112 + try {
113 + InetAddress addr = InetAddress.getByName(paramValue);
114 + if (!addr.isMulticastAddress()) {
115 + throw new IllegalStateException("Invalid multicast address !");
116 + }
117 + builder.setDestination(paramValue);
118 + } catch (UnknownHostException e) {
119 + throw new IllegalStateException("Invalid multicast address !");
120 + }
121 + }
122 + else {
123 + // Default multicast address
124 + builder.setDestination("228.5.6.7");
125 + }
126 + }
127 +
128 + // UNICAST -> the client can use this to specify where he wants the stream to be sent
129 + else if (paramName.equalsIgnoreCase("unicast")) {
130 + if (paramValue!=null) {
131 + builder.setDestination(paramValue);
132 + }
133 + }
134 +
135 + // VIDEOAPI -> can be used to specify what api will be used to encode video (the MediaRecorder API or the MediaCodec API)
136 + else if (paramName.equalsIgnoreCase("videoapi")) {
137 + if (paramValue!=null) {
138 + if (paramValue.equalsIgnoreCase("mr")) {
139 + videoApi = MediaStream.MODE_MEDIARECORDER_API;
140 + } else if (paramValue.equalsIgnoreCase("mc")) {
141 + videoApi = MediaStream.MODE_MEDIACODEC_API;
142 + }
143 + }
144 + }
145 +
146 + // AUDIOAPI -> can be used to specify what api will be used to encode audio (the MediaRecorder API or the MediaCodec API)
147 + else if (paramName.equalsIgnoreCase("audioapi")) {
148 + if (paramValue!=null) {
149 + if (paramValue.equalsIgnoreCase("mr")) {
150 + audioApi = MediaStream.MODE_MEDIARECORDER_API;
151 + } else if (paramValue.equalsIgnoreCase("mc")) {
152 + audioApi = MediaStream.MODE_MEDIACODEC_API;
153 + }
154 + }
155 + }
156 +
157 + // TTL -> the client can modify the time to live of packets
158 + // By default ttl=64
159 + else if (paramName.equalsIgnoreCase("ttl")) {
160 + if (paramValue!=null) {
161 + try {
162 + int ttl = Integer.parseInt(paramValue);
163 + if (ttl<0) throw new IllegalStateException();
164 + builder.setTimeToLive(ttl);
165 + } catch (Exception e) {
166 + throw new IllegalStateException("The TTL must be a positive integer !");
167 + }
168 + }
169 + }
170 +
171 + // H.264
172 + else if (paramName.equalsIgnoreCase("h264")) {
173 + VideoQuality quality = VideoQuality.parseQuality(paramValue);
174 + builder.setVideoQuality(quality).setVideoEncoder(VIDEO_H264);
175 + }
176 +
177 + // H.263
178 + else if (paramName.equalsIgnoreCase("h263")) {
179 + VideoQuality quality = VideoQuality.parseQuality(paramValue);
180 + builder.setVideoQuality(quality).setVideoEncoder(VIDEO_H263);
181 + }
182 +
183 + // AMR
184 + else if (paramName.equalsIgnoreCase("amrnb") || paramName.equalsIgnoreCase("amr")) {
185 + AudioQuality quality = AudioQuality.parseQuality(paramValue);
186 + builder.setAudioQuality(quality).setAudioEncoder(AUDIO_AMRNB);
187 + }
188 +
189 + // AAC
190 + else if (paramName.equalsIgnoreCase("aac")) {
191 + AudioQuality quality = AudioQuality.parseQuality(paramValue);
192 + builder.setAudioQuality(quality).setAudioEncoder(AUDIO_AAC);
193 + }
194 +
195 + }
196 +
197 + }
198 +
199 + if (builder.getVideoEncoder()==VIDEO_NONE && builder.getAudioEncoder()==AUDIO_NONE) {
200 + SessionBuilder b = SessionBuilder.getInstance();
201 + builder.setVideoEncoder(b.getVideoEncoder());
202 + builder.setAudioEncoder(b.getAudioEncoder());
203 + }
204 +
205 + Session session = builder.build();
206 +
207 + if (videoApi>0 && session.getVideoTrack() != null) {
208 + session.getVideoTrack().setStreamingMethod(videoApi);
209 + }
210 +
211 + if (audioApi>0 && session.getAudioTrack() != null) {
212 + session.getAudioTrack().setStreamingMethod(audioApi);
213 + }
214 +
215 + return session;
216 +
217 + }
218 +
219 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.video;
20 +
21 +import java.util.ArrayList;
22 +import java.util.HashMap;
23 +import android.annotation.SuppressLint;
24 +import android.media.MediaCodecInfo;
25 +import android.media.MediaCodecList;
26 +import android.os.Build;
27 +import android.util.Log;
28 +import android.util.SparseArray;
29 +
30 +@SuppressLint("InlinedApi")
31 +public class CodecManager {
32 +
33 + public final static String TAG = "CodecManager";
34 +
35 + public static final int[] SUPPORTED_COLOR_FORMATS = {
36 + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
37 + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
38 + };
39 +
40 + /**
41 + * There currently is no way to know if an encoder is software or hardware from the MediaCodecInfo class,
42 + * so we need to maintain a list of known software encoders.
43 + */
44 + public static final String[] SOFTWARE_ENCODERS = {
45 + "OMX.google.h264.encoder"
46 + };
47 +
48 + /**
49 + * Contains a list of encoders and color formats that we may use with a {@link CodecManager.Translator}.
50 + */
51 + static class Codecs {
52 + /** A hardware encoder supporting a color format we can use. */
53 + public String hardwareCodec;
54 + public int hardwareColorFormat;
55 + /** A software encoder supporting a color format we can use. */
56 + public String softwareCodec;
57 + public int softwareColorFormat;
58 + }
59 +
60 + /**
61 + * Contains helper functions to choose an encoder and a color format.
62 + */
63 + static class Selector {
64 +
65 + private static HashMap<String,SparseArray<ArrayList<String>>> sHardwareCodecs = new HashMap<>();
66 + private static HashMap<String,SparseArray<ArrayList<String>>> sSoftwareCodecs = new HashMap<>();
67 +
68 + /**
69 + * Determines the most appropriate encoder to compress the video from the Camera
70 + */
71 + public static Codecs findCodecsFormMimeType(String mimeType, boolean tryColorFormatSurface) {
72 + findSupportedColorFormats(mimeType);
73 + SparseArray<ArrayList<String>> hardwareCodecs = sHardwareCodecs.get(mimeType);
74 + SparseArray<ArrayList<String>> softwareCodecs = sSoftwareCodecs.get(mimeType);
75 + Codecs list = new Codecs();
76 +
77 + // On devices running 4.3, we need an encoder supporting the color format used to work with a Surface
78 + if (Build.VERSION.SDK_INT>=18 && tryColorFormatSurface) {
79 + int colorFormatSurface = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
80 + try {
81 + // We want a hardware encoder
82 + list.hardwareCodec = hardwareCodecs.get(colorFormatSurface).get(0);
83 + list.hardwareColorFormat = colorFormatSurface;
84 + } catch (Exception e) {}
85 + try {
86 + // We want a software encoder
87 + list.softwareCodec = softwareCodecs.get(colorFormatSurface).get(0);
88 + list.softwareColorFormat = colorFormatSurface;
89 + } catch (Exception e) {}
90 +
91 + if (list.hardwareCodec != null) {
92 + Log.v(TAG,"Choosen primary codec: "+list.hardwareCodec+" with color format: "+list.hardwareColorFormat);
93 + } else {
94 + Log.e(TAG,"No supported hardware codec found !");
95 + }
96 + if (list.softwareCodec != null) {
97 + Log.v(TAG,"Choosen secondary codec: "+list.hardwareCodec+" with color format: "+list.hardwareColorFormat);
98 + } else {
99 + Log.e(TAG,"No supported software codec found !");
100 + }
101 + return list;
102 + }
103 +
104 + for (int i=0;i<SUPPORTED_COLOR_FORMATS.length;i++) {
105 + try {
106 + list.hardwareCodec = hardwareCodecs.get(SUPPORTED_COLOR_FORMATS[i]).get(0);
107 + list.hardwareColorFormat = SUPPORTED_COLOR_FORMATS[i];
108 + break;
109 + } catch (Exception e) {}
110 + }
111 + for (int i=0;i<SUPPORTED_COLOR_FORMATS.length;i++) {
112 + try {
113 + list.softwareCodec = softwareCodecs.get(SUPPORTED_COLOR_FORMATS[i]).get(0);
114 + list.softwareColorFormat = SUPPORTED_COLOR_FORMATS[i];
115 + break;
116 + } catch (Exception e) {}
117 + }
118 +
119 + if (list.hardwareCodec != null) {
120 + Log.v(TAG,"Choosen primary codec: "+list.hardwareCodec+" with color format: "+list.hardwareColorFormat);
121 + } else {
122 + Log.e(TAG,"No supported hardware codec found !");
123 + }
124 + if (list.softwareCodec != null) {
125 + Log.v(TAG,"Choosen secondary codec: "+list.hardwareCodec+" with color format: "+list.softwareColorFormat);
126 + } else {
127 + Log.e(TAG,"No supported software codec found !");
128 + }
129 +
130 + return list;
131 + }
132 +
133 + /**
134 + * Returns an associative array of the supported color formats and the names of the encoders for a given mime type
135 + * This can take up to sec on certain phones the first time you run it...
136 + **/
137 + @SuppressLint("NewApi")
138 + static private void findSupportedColorFormats(String mimeType) {
139 + SparseArray<ArrayList<String>> softwareCodecs = new SparseArray<ArrayList<String>>();
140 + SparseArray<ArrayList<String>> hardwareCodecs = new SparseArray<ArrayList<String>>();
141 +
142 + if (sSoftwareCodecs.containsKey(mimeType)) {
143 + return;
144 + }
145 +
146 + Log.v(TAG,"Searching supported color formats for mime type \""+mimeType+"\"...");
147 +
148 + // We loop through the encoders, apparently this can take up to a sec (testes on a GS3)
149 + for(int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--){
150 + MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
151 + if (!codecInfo.isEncoder()) continue;
152 +
153 + String[] types = codecInfo.getSupportedTypes();
154 + for (int i = 0; i < types.length; i++) {
155 + if (types[i].equalsIgnoreCase(mimeType)) {
156 + MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
157 +
158 + boolean software = false;
159 + for (int k=0;k<SOFTWARE_ENCODERS.length;k++) {
160 + if (codecInfo.getName().equalsIgnoreCase(SOFTWARE_ENCODERS[i])) {
161 + software = true;
162 + }
163 + }
164 +
165 + // And through the color formats supported
166 + for (int k = 0; k < capabilities.colorFormats.length; k++) {
167 + int format = capabilities.colorFormats[k];
168 + if (software) {
169 + if (softwareCodecs.get(format) == null) softwareCodecs.put(format, new ArrayList<String>());
170 + softwareCodecs.get(format).add(codecInfo.getName());
171 + } else {
172 + if (hardwareCodecs.get(format) == null) hardwareCodecs.put(format, new ArrayList<String>());
173 + hardwareCodecs.get(format).add(codecInfo.getName());
174 + }
175 + }
176 +
177 + }
178 + }
179 + }
180 +
181 + // Logs the supported color formats on the phone
182 + StringBuilder e = new StringBuilder();
183 + e.append("Supported color formats on this phone: ");
184 + for (int i=0;i<softwareCodecs.size();i++) e.append(softwareCodecs.keyAt(i)+", ");
185 + for (int i=0;i<hardwareCodecs.size();i++) e.append(hardwareCodecs.keyAt(i)+(i==hardwareCodecs.size()-1?".":", "));
186 + Log.v(TAG, e.toString());
187 +
188 + sSoftwareCodecs.put(mimeType, softwareCodecs);
189 + sHardwareCodecs.put(mimeType, hardwareCodecs);
190 + return;
191 + }
192 +
193 +
194 + }
195 +
196 + static class Translator {
197 +
198 + private int mOutputColorFormat;
199 + private int mWidth;
200 + private int mHeight;
201 + private int mYStride;
202 + private int mUVStride;
203 + private int mYSize;
204 + private int mUVSize;
205 + private int bufferSize;
206 + private int i;
207 + private byte[] tmp;
208 +
209 + public Translator(int outputColorFormat, int width, int height) {
210 + mOutputColorFormat = outputColorFormat;
211 + mWidth = width;
212 + mHeight = height;
213 + mYStride = (int) Math.ceil(mWidth / 16.0) * 16;
214 + mUVStride = (int) Math.ceil( (mYStride / 2) / 16.0) * 16;
215 + mYSize = mYStride * mHeight;
216 + mUVSize = mUVStride * mHeight / 2;
217 + bufferSize = mYSize + mUVSize * 2;
218 + tmp = new byte[mUVSize*2];
219 + }
220 +
221 + public int getBufferSize() {
222 + return bufferSize;
223 + }
224 +
225 + public int getUVStride() {
226 + return mUVStride;
227 + }
228 +
229 + public int getYStride() {
230 + return mYStride;
231 + }
232 +
233 + public byte[] translate(byte[] buffer) {
234 +
235 + if (mOutputColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
236 + // FIXME: May be issues because of padding here :/
237 + int wh4 = bufferSize/6; //wh4 = width*height/4
238 + byte tmp;
239 + for (i=wh4*4; i<wh4*5; i++) {
240 + tmp = buffer[i];
241 + buffer[i] = buffer[i+wh4];
242 + buffer[i+wh4] = tmp;
243 + }
244 + }
245 +
246 + else if (mOutputColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
247 + // We need to interleave the U and V channel
248 + System.arraycopy(buffer, mYSize, tmp, 0, mUVSize*2); // Y
249 + for (i = 0; i < mUVSize; i++) {
250 + buffer[mYSize + i*2] = tmp[i + mUVSize]; // Cb (U)
251 + buffer[mYSize + i*2+1] = tmp[i]; // Cr (V)
252 + }
253 + }
254 +
255 + return buffer;
256 + }
257 +
258 +
259 + }
260 +
261 +
262 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.video;
20 +
21 +import java.io.IOException;
22 +import net.majorkernelpanic.streaming.SessionBuilder;
23 +import net.majorkernelpanic.streaming.rtp.H263Packetizer;
24 +import android.graphics.ImageFormat;
25 +import android.hardware.Camera.CameraInfo;
26 +import android.media.MediaRecorder;
27 +import android.service.textservice.SpellCheckerService.Session;
28 +
29 +/**
30 + * A class for streaming H.263 from the camera of an android device using RTP.
31 + * You should use a {@link Session} instantiated with {@link SessionBuilder} instead of using this class directly.
32 + * Call {@link #setDestinationAddress(InetAddress)}, {@link #setDestinationPorts(int)} and {@link #setVideoQuality(VideoQuality)}
33 + * to configure the stream. You can then call {@link #start()} to start the RTP stream.
34 + * Call {@link #stop()} to stop the stream.
35 + */
36 +public class H263Stream extends VideoStream {
37 +
38 + /**
39 + * Constructs the H.263 stream.
40 + * Uses CAMERA_FACING_BACK by default.
41 + * @throws IOException
42 + */
43 + public H263Stream() throws IOException {
44 + this(CameraInfo.CAMERA_FACING_BACK);
45 + }
46 +
47 + /**
48 + * Constructs the H.263 stream.
49 + * @param cameraId Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
50 + * @throws IOException
51 + */
52 + public H263Stream(int cameraId) {
53 + super(cameraId);
54 + mCameraImageFormat = ImageFormat.NV21;
55 + mVideoEncoder = MediaRecorder.VideoEncoder.H263;
56 + mPacketizer = new H263Packetizer();
57 + }
58 +
59 + /**
60 + * Starts the stream.
61 + */
62 + public synchronized void start() throws IllegalStateException, IOException {
63 + if (!mStreaming) {
64 + configure();
65 + super.start();
66 + }
67 + }
68 +
69 + public synchronized void configure() throws IllegalStateException, IOException {
70 + super.configure();
71 + mMode = MODE_MEDIARECORDER_API;
72 + mQuality = mRequestedQuality.clone();
73 + }
74 +
75 + /**
76 + * Returns a description of the stream using SDP. It can then be included in an SDP file.
77 + */
78 + public String getSessionDescription() {
79 + return "m=video "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
80 + "a=rtpmap:96 H263-1998/90000\r\n";
81 + }
82 +
83 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.video;
20 +
21 +import java.io.File;
22 +import java.io.IOException;
23 +import java.util.concurrent.Semaphore;
24 +import java.util.concurrent.TimeUnit;
25 +import net.majorkernelpanic.streaming.SessionBuilder;
26 +import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException;
27 +import net.majorkernelpanic.streaming.exceptions.StorageUnavailableException;
28 +import net.majorkernelpanic.streaming.hw.EncoderDebugger;
29 +import net.majorkernelpanic.streaming.mp4.MP4Config;
30 +import net.majorkernelpanic.streaming.rtp.H264Packetizer;
31 +import android.annotation.SuppressLint;
32 +import android.content.SharedPreferences.Editor;
33 +import android.graphics.ImageFormat;
34 +import android.hardware.Camera.CameraInfo;
35 +import android.media.MediaRecorder;
36 +import android.os.Environment;
37 +import android.service.textservice.SpellCheckerService.Session;
38 +import android.util.Base64;
39 +import android.util.Log;
40 +
41 +/**
42 + * A class for streaming H.264 from the camera of an android device using RTP.
43 + * You should use a {@link Session} instantiated with {@link SessionBuilder} instead of using this class directly.
44 + * Call {@link #setDestinationAddress(InetAddress)}, {@link #setDestinationPorts(int)} and {@link #setVideoQuality(VideoQuality)}
45 + * to configure the stream. You can then call {@link #start()} to start the RTP stream.
46 + * Call {@link #stop()} to stop the stream.
47 + */
48 +public class H264Stream extends VideoStream {
49 +
50 + public final static String TAG = "H264Stream";
51 +
52 + private Semaphore mLock = new Semaphore(0);
53 + private MP4Config mConfig;
54 +
55 + /**
56 + * Constructs the H.264 stream.
57 + * Uses CAMERA_FACING_BACK by default.
58 + */
59 + public H264Stream() {
60 + this(CameraInfo.CAMERA_FACING_BACK);
61 + }
62 +
63 + /**
64 + * Constructs the H.264 stream.
65 + * @param cameraId Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
66 + * @throws IOException
67 + */
68 + public H264Stream(int cameraId) {
69 + super(cameraId);
70 + mMimeType = "video/avc";
71 + mCameraImageFormat = ImageFormat.NV21;
72 + mVideoEncoder = MediaRecorder.VideoEncoder.H264;
73 + mPacketizer = new H264Packetizer();
74 + }
75 +
76 + /**
77 + * Returns a description of the stream using SDP. It can then be included in an SDP file.
78 + */
79 + public synchronized String getSessionDescription() throws IllegalStateException {
80 + if (mConfig == null) throw new IllegalStateException("You need to call configure() first !");
81 + return "m=video "+String.valueOf(getDestinationPorts()[0])+" RTP/AVP 96\r\n" +
82 + "a=rtpmap:96 H264/90000\r\n" +
83 + "a=fmtp:96 packetization-mode=1;profile-level-id="+mConfig.getProfileLevel()+";sprop-parameter-sets="+mConfig.getB64SPS()+","+mConfig.getB64PPS()+";\r\n";
84 + }
85 +
86 + /**
87 + * Starts the stream.
88 + * This will also open the camera and display the preview if {@link #startPreview()} has not already been called.
89 + */
90 + public synchronized void start() throws IllegalStateException, IOException {
91 + if (!mStreaming) {
92 + configure();
93 + byte[] pps = Base64.decode(mConfig.getB64PPS(), Base64.NO_WRAP);
94 + byte[] sps = Base64.decode(mConfig.getB64SPS(), Base64.NO_WRAP);
95 + ((H264Packetizer)mPacketizer).setStreamParameters(pps, sps);
96 + super.start();
97 + }
98 + }
99 +
100 + /**
101 + * Configures the stream. You need to call this before calling {@link #getSessionDescription()} to apply
102 + * your configuration of the stream.
103 + */
104 + public synchronized void configure() throws IllegalStateException, IOException {
105 + super.configure();
106 + mMode = mRequestedMode;
107 + mQuality = mRequestedQuality.clone();
108 + mConfig = testH264();
109 + }
110 +
111 + /**
112 + * Tests if streaming with the given configuration (bit rate, frame rate, resolution) is possible
113 + * and determines the pps and sps. Should not be called by the UI thread.
114 + **/
115 + private MP4Config testH264() throws IllegalStateException, IOException {
116 + if (mMode != MODE_MEDIARECORDER_API) return testMediaCodecAPI();
117 + else return testMediaRecorderAPI();
118 + }
119 +
120 + @SuppressLint("NewApi")
121 + private MP4Config testMediaCodecAPI() throws RuntimeException, IOException {
122 + createCamera();
123 + updateCamera();
124 + try {
125 + if (mQuality.resX>=640) {
126 + // Using the MediaCodec API with the buffer method for high resolutions is too slow
127 + mMode = MODE_MEDIARECORDER_API;
128 + }
129 + EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
130 + return new MP4Config(debugger.getB64SPS(), debugger.getB64PPS());
131 + } catch (Exception e) {
132 + // Fallback on the old streaming method using the MediaRecorder API
133 + Log.e(TAG,"Resolution not supported with the MediaCodec API, we fallback on the old streamign method.");
134 + mMode = MODE_MEDIARECORDER_API;
135 + return testH264();
136 + }
137 + }
138 +
139 + // Should not be called by the UI thread
140 + private MP4Config testMediaRecorderAPI() throws RuntimeException, IOException {
141 + String key = PREF_PREFIX+"h264-mr-"+mRequestedQuality.framerate+","+mRequestedQuality.resX+","+mRequestedQuality.resY;
142 +
143 + if (mSettings != null && mSettings.contains(key) ) {
144 + String[] s = mSettings.getString(key, "").split(",");
145 + return new MP4Config(s[0],s[1],s[2]);
146 + }
147 +
148 + if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
149 + throw new StorageUnavailableException("No external storage or external storage not ready !");
150 + }
151 +
152 + final String TESTFILE = Environment.getExternalStorageDirectory().getPath()+"/spydroid-test.mp4";
153 +
154 + Log.i(TAG,"Testing H264 support... Test file saved at: "+TESTFILE);
155 +
156 + try {
157 + File file = new File(TESTFILE);
158 + file.createNewFile();
159 + } catch (IOException e) {
160 + throw new StorageUnavailableException(e.getMessage());
161 + }
162 +
163 + // Save flash state & set it to false so that led remains off while testing h264
164 + boolean savedFlashState = mFlashEnabled;
165 + mFlashEnabled = false;
166 +
167 + boolean previewStarted = mPreviewStarted;
168 +
169 + boolean cameraOpen = mCamera!=null;
170 + createCamera();
171 +
172 + // Stops the preview if needed
173 + if (mPreviewStarted) {
174 + lockCamera();
175 + try {
176 + mCamera.stopPreview();
177 + } catch (Exception e) {}
178 + mPreviewStarted = false;
179 + }
180 +
181 + try {
182 + Thread.sleep(100);
183 + } catch (InterruptedException e1) {
184 + // TODO Auto-generated catch block
185 + e1.printStackTrace();
186 + }
187 +
188 + unlockCamera();
189 +
190 + try {
191 +
192 + mMediaRecorder = new MediaRecorder();
193 + mMediaRecorder.setCamera(mCamera);
194 + mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
195 + mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
196 + mMediaRecorder.setVideoEncoder(mVideoEncoder);
197 + mMediaRecorder.setPreviewDisplay(mSurfaceView.getHolder().getSurface());
198 + mMediaRecorder.setVideoSize(mRequestedQuality.resX,mRequestedQuality.resY);
199 + mMediaRecorder.setVideoFrameRate(mRequestedQuality.framerate);
200 + mMediaRecorder.setVideoEncodingBitRate((int)(mRequestedQuality.bitrate*0.8));
201 + mMediaRecorder.setOutputFile(TESTFILE);
202 + mMediaRecorder.setMaxDuration(3000);
203 +
204 + // We wait a little and stop recording
205 + mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
206 + public void onInfo(MediaRecorder mr, int what, int extra) {
207 + Log.d(TAG,"MediaRecorder callback called !");
208 + if (what==MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
209 + Log.d(TAG,"MediaRecorder: MAX_DURATION_REACHED");
210 + } else if (what==MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
211 + Log.d(TAG,"MediaRecorder: MAX_FILESIZE_REACHED");
212 + } else if (what==MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
213 + Log.d(TAG,"MediaRecorder: INFO_UNKNOWN");
214 + } else {
215 + Log.d(TAG,"WTF ?");
216 + }
217 + mLock.release();
218 + }
219 + });
220 +
221 + // Start recording
222 + mMediaRecorder.prepare();
223 + mMediaRecorder.start();
224 +
225 + if (mLock.tryAcquire(6,TimeUnit.SECONDS)) {
226 + Log.d(TAG,"MediaRecorder callback was called :)");
227 + Thread.sleep(400);
228 + } else {
229 + Log.d(TAG,"MediaRecorder callback was not called after 6 seconds... :(");
230 + }
231 + } catch (IOException e) {
232 + throw new ConfNotSupportedException(e.getMessage());
233 + } catch (RuntimeException e) {
234 + throw new ConfNotSupportedException(e.getMessage());
235 + } catch (InterruptedException e) {
236 + e.printStackTrace();
237 + } finally {
238 + try {
239 + mMediaRecorder.stop();
240 + } catch (Exception e) {}
241 + mMediaRecorder.release();
242 + mMediaRecorder = null;
243 + lockCamera();
244 + if (!cameraOpen) destroyCamera();
245 + // Restore flash state
246 + mFlashEnabled = savedFlashState;
247 + if (previewStarted) {
248 + // If the preview was started before the test, we try to restart it.
249 + try {
250 + startPreview();
251 + } catch (Exception e) {}
252 + }
253 + }
254 +
255 + // Retrieve SPS & PPS & ProfileId with MP4Config
256 + MP4Config config = new MP4Config(TESTFILE);
257 +
258 + // Delete dummy video
259 + File file = new File(TESTFILE);
260 + if (!file.delete()) Log.e(TAG,"Temp file could not be erased");
261 +
262 + Log.i(TAG,"H264 Test succeded...");
263 +
264 + // Save test result
265 + if (mSettings != null) {
266 + Editor editor = mSettings.edit();
267 + editor.putString(key, config.getProfileLevel()+","+config.getB64SPS()+","+config.getB64PPS());
268 + editor.commit();
269 + }
270 +
271 + return config;
272 +
273 + }
274 +
275 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.video;
20 +
21 +import java.util.Iterator;
22 +import java.util.List;
23 +import android.hardware.Camera;
24 +import android.hardware.Camera.Size;
25 +import android.util.Log;
26 +
27 +/**
28 + * A class that represents the quality of a video stream.
29 + * It contains the resolution, the framerate (in fps) and the bitrate (in bps) of the stream.
30 + */
31 +public class VideoQuality {
32 +
33 + public final static String TAG = "VideoQuality";
34 +
35 + /** Default video stream quality. */
36 + public final static VideoQuality DEFAULT_VIDEO_QUALITY = new VideoQuality(176,144,20,500000);
37 +
38 + /** Represents a quality for a video stream. */
39 + public VideoQuality() {}
40 +
41 + /**
42 + * Represents a quality for a video stream.
43 + * @param resX The horizontal resolution
44 + * @param resY The vertical resolution
45 + */
46 + public VideoQuality(int resX, int resY) {
47 + this.resX = resX;
48 + this.resY = resY;
49 + }
50 +
51 + /**
52 + * Represents a quality for a video stream.
53 + * @param resX The horizontal resolution
54 + * @param resY The vertical resolution
55 + * @param framerate The framerate in frame per seconds
56 + * @param bitrate The bitrate in bit per seconds
57 + */
58 + public VideoQuality(int resX, int resY, int framerate, int bitrate) {
59 + this.framerate = framerate;
60 + this.bitrate = bitrate;
61 + this.resX = resX;
62 + this.resY = resY;
63 + }
64 +
65 + public int framerate = 0;
66 + public int bitrate = 0;
67 + public int resX = 0;
68 + public int resY = 0;
69 +
70 + public boolean equals(VideoQuality quality) {
71 + if (quality==null) return false;
72 + return (quality.resX == this.resX &&
73 + quality.resY == this.resY &&
74 + quality.framerate == this.framerate &&
75 + quality.bitrate == this.bitrate);
76 + }
77 +
78 + public VideoQuality clone() {
79 + return new VideoQuality(resX,resY,framerate,bitrate);
80 + }
81 +
82 + public static VideoQuality parseQuality(String str) {
83 + VideoQuality quality = DEFAULT_VIDEO_QUALITY.clone();
84 + if (str != null) {
85 + String[] config = str.split("-");
86 + try {
87 + quality.bitrate = Integer.parseInt(config[0])*1000; // conversion to bit/s
88 + quality.framerate = Integer.parseInt(config[1]);
89 + quality.resX = Integer.parseInt(config[2]);
90 + quality.resY = Integer.parseInt(config[3]);
91 + }
92 + catch (IndexOutOfBoundsException ignore) {}
93 + }
94 + return quality;
95 + }
96 +
97 + public String toString() {
98 + return resX+"x"+resY+" px, "+framerate+" fps, "+bitrate/1000+" kbps";
99 + }
100 +
101 + /**
102 + * Checks if the requested resolution is supported by the camera.
103 + * If not, it modifies it by supported parameters.
104 + **/
105 + public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
106 + VideoQuality v = quality.clone();
107 + int minDist = Integer.MAX_VALUE;
108 + String supportedSizesStr = "Supported resolutions: ";
109 + List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
110 + for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
111 + Size size = it.next();
112 + supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
113 + int dist = Math.abs(quality.resX - size.width);
114 + if (dist<minDist) {
115 + minDist = dist;
116 + v.resX = size.width;
117 + v.resY = size.height;
118 + }
119 + }
120 + Log.v(TAG, supportedSizesStr);
121 + if (quality.resX != v.resX || quality.resY != v.resY) {
122 + Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
123 + }
124 +
125 + return v;
126 + }
127 +
128 + public static int[] determineMaximumSupportedFramerate(Camera.Parameters parameters) {
129 + int[] maxFps = new int[]{0,0};
130 + String supportedFpsRangesStr = "Supported frame rates: ";
131 + List<int[]> supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
132 + for (Iterator<int[]> it = supportedFpsRanges.iterator(); it.hasNext();) {
133 + int[] interval = it.next();
134 + // Intervals are returned as integers, for example "29970" means "29.970" FPS.
135 + supportedFpsRangesStr += interval[0]/1000+"-"+interval[1]/1000+"fps"+(it.hasNext()?", ":"");
136 + if (interval[1]>maxFps[1] || (interval[0]>maxFps[0] && interval[1]==maxFps[1])) {
137 + maxFps = interval;
138 + }
139 + }
140 + Log.v(TAG,supportedFpsRangesStr);
141 + return maxFps;
142 + }
143 +
144 +}
1 +/*
2 + * Copyright (C) 2011-2015 GUIGUI Simon, fyhertz@gmail.com
3 + *
4 + * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
5 + *
6 + * Licensed under the Apache License, Version 2.0 (the "License");
7 + * you may not use this file except in compliance with the License.
8 + * You may obtain a copy of the License at
9 + *
10 + * http://www.apache.org/licenses/LICENSE-2.0
11 + *
12 + * Unless required by applicable law or agreed to in writing, software
13 + * distributed under the License is distributed on an "AS IS" BASIS,
14 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 + * See the License for the specific language governing permissions and
16 + * limitations under the License.
17 + */
18 +
19 +package net.majorkernelpanic.streaming.video;
20 +
21 +import java.io.FileDescriptor;
22 +import java.io.IOException;
23 +import java.io.InputStream;
24 +import java.nio.ByteBuffer;
25 +import java.util.concurrent.Semaphore;
26 +import java.util.concurrent.TimeUnit;
27 +import net.majorkernelpanic.streaming.MediaStream;
28 +import net.majorkernelpanic.streaming.Stream;
29 +import net.majorkernelpanic.streaming.exceptions.CameraInUseException;
30 +import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException;
31 +import net.majorkernelpanic.streaming.exceptions.InvalidSurfaceException;
32 +import net.majorkernelpanic.streaming.gl.SurfaceView;
33 +import net.majorkernelpanic.streaming.hw.EncoderDebugger;
34 +import net.majorkernelpanic.streaming.hw.NV21Convertor;
35 +import net.majorkernelpanic.streaming.rtp.MediaCodecInputStream;
36 +import android.annotation.SuppressLint;
37 +import android.content.SharedPreferences;
38 +import android.content.SharedPreferences.Editor;
39 +import android.hardware.Camera;
40 +import android.hardware.Camera.CameraInfo;
41 +import android.hardware.Camera.Parameters;
42 +import android.media.MediaCodec;
43 +import android.media.MediaCodecInfo;
44 +import android.media.MediaFormat;
45 +import android.media.MediaRecorder;
46 +import android.os.Looper;
47 +import android.os.ParcelFileDescriptor;
48 +import android.util.Log;
49 +import android.view.Surface;
50 +import android.view.SurfaceHolder;
51 +import android.view.SurfaceHolder.Callback;
52 +
/**
 * Don't use this class directly.
 * Base class of the camera video streams: holds the camera selection,
 * preview surface, flash and quality state shared by its subclasses.
 */
public abstract class VideoStream extends MediaStream {

	protected final static String TAG = "VideoStream";

	// Quality requested by the caller; copied into mQuality when the stream is configured
	protected VideoQuality mRequestedQuality = VideoQuality.DEFAULT_VIDEO_QUALITY.clone();
	// Quality actually applied to the current/next stream
	protected VideoQuality mQuality = mRequestedQuality.clone();
	// Callback registered on the preview surface by setSurfaceView()
	protected SurfaceHolder.Callback mSurfaceHolderCallback = null;
	// Surface on which the camera preview is displayed
	protected SurfaceView mSurfaceView = null;
	// Used to cache encoder test results between runs; may be null
	protected SharedPreferences mSettings = null;
	// MediaRecorder.VideoEncoder constant (set by subclasses) and selected camera index
	protected int mVideoEncoder, mCameraId = 0;
	// Orientation requested via setPreviewOrientation() and the one currently applied
	protected int mRequestedOrientation = 0, mOrientation = 0;
	// Open camera, or null when none is held
	protected Camera mCamera;
	// NOTE(review): presumably the thread/looper hosting camera callbacks -- creation not visible in this chunk
	protected Thread mCameraThread;
	protected Looper mCameraLooper;

	// True when the preview was started explicitly via startPreview()
	protected boolean mCameraOpenedManually = true;
	protected boolean mFlashEnabled = false;
	// Set by the SurfaceHolder callback while the preview surface exists
	protected boolean mSurfaceReady = false;
	// NOTE(review): presumably tracks whether the camera is unlocked for MediaRecorder use -- confirm in lockCamera()/unlockCamera()
	protected boolean mUnlocked = false;
	protected boolean mPreviewStarted = false;
	// Cleared whenever a requested setting changes so the camera gets reconfigured
	protected boolean mUpdated = false;

	// MIME type of the encoded stream (e.g. "video/avc"), set by subclasses
	protected String mMimeType;
	protected String mEncoderName;
	protected int mEncoderColorFormat;
	// Image format the camera must produce (subclasses use ImageFormat.NV21)
	protected int mCameraImageFormat;
	protected int mMaxFps = 0;
83 +
	/**
	 * Don't use this class directly.
	 * Uses CAMERA_FACING_BACK by default.
	 */
	public VideoStream() {
		this(CameraInfo.CAMERA_FACING_BACK);
	}
91 +
	/**
	 * Don't use this class directly
	 * @param camera Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
	 */
	@SuppressLint("InlinedApi")
	public VideoStream(int camera) {
		super();
		// Resolve the facing constant to an actual camera index
		setCamera(camera);
	}
101 +
102 + /**
103 + * Sets the camera that will be used to capture video.
104 + * You can call this method at any time and changes will take effect next time you start the stream.
105 + * @param camera Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
106 + */
107 + public void setCamera(int camera) {
108 + CameraInfo cameraInfo = new CameraInfo();
109 + int numberOfCameras = Camera.getNumberOfCameras();
110 + for (int i=0;i<numberOfCameras;i++) {
111 + Camera.getCameraInfo(i, cameraInfo);
112 + if (cameraInfo.facing == camera) {
113 + mCameraId = i;
114 + break;
115 + }
116 + }
117 + }
118 +
	/** Switch between the front facing and the back facing camera of the phone.
	 * If {@link #startPreview()} has been called, the preview will be briefly interrupted.
	 * If {@link #start()} has been called, the stream will be briefly interrupted.
	 * You should not call this method from the main thread if you are already streaming.
	 * @throws IOException
	 * @throws RuntimeException
	 **/
	public void switchCamera() throws RuntimeException, IOException {
		if (Camera.getNumberOfCameras() == 1) throw new IllegalStateException("Phone only has one camera !");
		// Remember what was running so it can be restored on the new camera
		boolean streaming = mStreaming;
		boolean previewing = mCamera!=null && mCameraOpenedManually;
		mCameraId = (mCameraId == CameraInfo.CAMERA_FACING_BACK) ? CameraInfo.CAMERA_FACING_FRONT : CameraInfo.CAMERA_FACING_BACK;
		setCamera(mCameraId);
		// Tear everything down, then restore preview/stream as they were
		stopPreview();
		mFlashEnabled = false;
		if (previewing) startPreview();
		if (streaming) start();
	}
137 +
	/**
	 * Returns the id of the camera currently selected.
	 * Can be either {@link CameraInfo#CAMERA_FACING_BACK} or
	 * {@link CameraInfo#CAMERA_FACING_FRONT}.
	 */
	public int getCamera() {
		return mCameraId;
	}
146 +
147 + public Camera getCameraObject() {return mCamera;}
148 +
	/**
	 * Sets a Surface to show a preview of recorded media (video).
	 * You can call this method at any time and changes will take effect next time you call {@link #start()}.
	 */
	public synchronized void setSurfaceView(SurfaceView view) {
		mSurfaceView = view;
		// Detach our callback from any previously registered holder first
		if (mSurfaceHolderCallback != null && mSurfaceView != null && mSurfaceView.getHolder() != null) {
			mSurfaceView.getHolder().removeCallback(mSurfaceHolderCallback);
		}
		if (mSurfaceView != null && mSurfaceView.getHolder() != null) {
			// Track surface lifecycle so streaming stops when the surface goes away
			mSurfaceHolderCallback = new Callback() {
				@Override
				public void surfaceDestroyed(SurfaceHolder holder) {
					mSurfaceReady = false;
					stopPreview();
					Log.d(TAG,"Surface destroyed !");
				}
				@Override
				public void surfaceCreated(SurfaceHolder holder) {
					mSurfaceReady = true;
				}
				@Override
				public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
					Log.d(TAG,"Surface Changed !");
				}
			};
			mSurfaceView.getHolder().addCallback(mSurfaceHolderCallback);
			// Assume the surface already exists at registration time
			mSurfaceReady = true;
		}
	}
179 +
	/** Turns the LED on or off if phone has one.
	 * @throws RuntimeException if the phone has no flash or it cannot be toggled */
	public synchronized void setFlashState(boolean state) {
		// If the camera has already been opened, we apply the change immediately
		if (mCamera != null) {

			// While streaming through MediaRecorder the camera is unlocked;
			// it must be re-locked before we may change its parameters
			if (mStreaming && mMode == MODE_MEDIARECORDER_API) {
				lockCamera();
			}

			Parameters parameters = mCamera.getParameters();

			// We test if the phone has a flash
			if (parameters.getFlashMode()==null) {
				// The phone has no flash or the choosen camera can not toggle the flash
				throw new RuntimeException("Can't turn the flash on !");
			} else {
				parameters.setFlashMode(state?Parameters.FLASH_MODE_TORCH:Parameters.FLASH_MODE_OFF);
				try {
					mCamera.setParameters(parameters);
					mFlashEnabled = state;
				} catch (RuntimeException e) {
					mFlashEnabled = false;
					throw new RuntimeException("Can't turn the flash on !");
				} finally {
					// Hand the camera back to MediaRecorder if it was streaming
					if (mStreaming && mMode == MODE_MEDIARECORDER_API) {
						unlockCamera();
					}
				}
			}
		} else {
			// Camera not open yet: just remember the desired state
			mFlashEnabled = state;
		}
	}
213 +
214 + /**
215 + * Toggles the LED of the phone if it has one.
216 + * You can get the current state of the flash with {@link VideoStream#getFlashState()}.
217 + */
218 + public synchronized void toggleFlash() {
219 + setFlashState(!mFlashEnabled);
220 + }
221 +
	/** Indicates whether or not the flash of the phone is on. */
	public boolean getFlashState() {
		return mFlashEnabled;
	}
226 +
	/**
	 * Sets the orientation of the preview.
	 * Takes effect at the next (re)configuration of the camera.
	 * @param orientation The orientation of the preview
	 */
	public void setPreviewOrientation(int orientation) {
		mRequestedOrientation = orientation;
		// Mark the camera configuration as stale
		mUpdated = false;
	}
235 +
236 + /**
237 + * Sets the configuration of the stream. You can call this method at any time
238 + * and changes will take effect next time you call {@link #configure()}.
239 + * @param videoQuality Quality of the stream
240 + */
241 + public void setVideoQuality(VideoQuality videoQuality) {
242 + if (!mRequestedQuality.equals(videoQuality)) {
243 + mRequestedQuality = videoQuality.clone();
244 + mUpdated = false;
245 + }
246 + }
247 +
	/**
	 * Returns the quality of the stream.
	 * Note: this is the requested quality, not necessarily the one in use.
	 */
	public VideoQuality getVideoQuality() {
		return mRequestedQuality;
	}
254 +
	/**
	 * Some data (SPS and PPS params) needs to be stored when {@link #getSessionDescription()} is called
	 * @param prefs The SharedPreferences that will be used to save SPS and PPS parameters
	 */
	public void setPreferences(SharedPreferences prefs) {
		mSettings = prefs;
	}
262 +
	/**
	 * Configures the stream. You need to call this before calling {@link #getSessionDescription()}
	 * to apply your configuration of the stream.
	 */
	public synchronized void configure() throws IllegalStateException, IOException {
		super.configure();
		// Lock in the orientation requested so far
		mOrientation = mRequestedOrientation;
	}
271 +
	/**
	 * Starts the stream.
	 * This will also open the camera and display the preview
	 * if {@link #startPreview()} has not already been called.
	 */
	public synchronized void start() throws IllegalStateException, IOException {
		// If we open the camera ourselves, remember to release it in stop()
		if (!mPreviewStarted) mCameraOpenedManually = false;
		super.start();
		Log.d(TAG,"Stream configuration: FPS: "+mQuality.framerate+" Width: "+mQuality.resX+" Height: "+mQuality.resY);
	}
282 +
	/** Stops the stream. */
	public synchronized void stop() {
		if (mCamera != null) {
			if (mMode == MODE_MEDIACODEC_API) {
				// Stop feeding preview frames to the encoder
				mCamera.setPreviewCallbackWithBuffer(null);
			}
			if (mMode == MODE_MEDIACODEC_API_2) {
				// Detach the encoder input surface from the preview
				((SurfaceView)mSurfaceView).removeMediaCodecSurface();
			}
			super.stop();
			// We need to restart the preview
			if (!mCameraOpenedManually) {
				// The camera was only opened for streaming: release it entirely
				destroyCamera();
			} else {
				try {
					startPreview();
				} catch (RuntimeException e) {
					e.printStackTrace();
				}
			}
		}
	}
305 +
	/**
	 * Starts the camera preview on the surface set with {@link #setSurfaceView(SurfaceView)}.
	 * Marks the camera as opened manually so {@link #stop()} keeps it alive.
	 * @throws CameraInUseException if another application holds the camera
	 * @throws InvalidSurfaceException if no valid surface is available
	 */
	public synchronized void startPreview()
			throws CameraInUseException,
			InvalidSurfaceException,
			RuntimeException {

		mCameraOpenedManually = true;
		if (!mPreviewStarted) {
			createCamera();
			updateCamera();
		}
	}
317 +
	/**
	 * Stops the preview.
	 */
	public synchronized void stopPreview() {
		// Clearing this flag makes stop() release the camera instead of restarting the preview.
		mCameraOpenedManually = false;
		stop();
	}
325 +
326 + /**
327 + * Video encoding is done by a MediaRecorder.
328 + */
329 + protected void encodeWithMediaRecorder() throws IOException, ConfNotSupportedException {
330 +
331 + Log.d(TAG,"Video encoded using the MediaRecorder API");
332 +
333 + // We need a local socket to forward data output by the camera to the packetizer
334 + createSockets();
335 +
336 + // Reopens the camera if needed
337 + destroyCamera();
338 + createCamera();
339 +
340 + // The camera must be unlocked before the MediaRecorder can use it
341 + unlockCamera();
342 +
343 + try {
344 + mMediaRecorder = new MediaRecorder();
345 + mMediaRecorder.setCamera(mCamera);
346 + mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
347 + mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
348 + mMediaRecorder.setVideoEncoder(mVideoEncoder);
349 + mMediaRecorder.setPreviewDisplay(mSurfaceView.getHolder().getSurface());
350 + mMediaRecorder.setVideoSize(mRequestedQuality.resX,mRequestedQuality.resY);
351 + mMediaRecorder.setVideoFrameRate(mRequestedQuality.framerate);
352 +
353 + // The bandwidth actually consumed is often above what was requested
354 + mMediaRecorder.setVideoEncodingBitRate((int)(mRequestedQuality.bitrate*0.8));
355 +
356 + // We write the output of the camera in a local socket instead of a file !
357 + // This one little trick makes streaming feasible quiet simply: data from the camera
358 + // can then be manipulated at the other end of the socket
359 + FileDescriptor fd = null;
360 + if (sPipeApi == PIPE_API_PFD) {
361 + fd = mParcelWrite.getFileDescriptor();
362 + } else {
363 + fd = mSender.getFileDescriptor();
364 + }
365 + mMediaRecorder.setOutputFile(fd);
366 +
367 + mMediaRecorder.prepare();
368 + mMediaRecorder.start();
369 +
370 + } catch (Exception e) {
371 + throw new ConfNotSupportedException(e.getMessage());
372 + }
373 +
374 + InputStream is = null;
375 +
376 + if (sPipeApi == PIPE_API_PFD) {
377 + is = new ParcelFileDescriptor.AutoCloseInputStream(mParcelRead);
378 + } else {
379 + is = mReceiver.getInputStream();
380 + }
381 +
382 + // This will skip the MPEG4 header if this step fails we can't stream anything :(
383 + try {
384 + byte buffer[] = new byte[4];
385 + // Skip all atoms preceding mdat atom
386 + while (!Thread.interrupted()) {
387 + while (is.read() != 'm');
388 + is.read(buffer,0,3);
389 + if (buffer[0] == 'd' && buffer[1] == 'a' && buffer[2] == 't') break;
390 + }
391 + } catch (IOException e) {
392 + Log.e(TAG,"Couldn't skip mp4 header :/");
393 + stop();
394 + throw e;
395 + }
396 +
397 + // The packetizer encapsulates the bit stream in an RTP stream and send it over the network
398 + mPacketizer.setInputStream(is);
399 + mPacketizer.start();
400 +
401 + mStreaming = true;
402 +
403 + }
404 +
405 +
406 + /**
407 + * Video encoding is done by a MediaCodec.
408 + */
409 + protected void encodeWithMediaCodec() throws RuntimeException, IOException {
410 + if (mMode == MODE_MEDIACODEC_API_2) {
411 + // Uses the method MediaCodec.createInputSurface to feed the encoder
412 + encodeWithMediaCodecMethod2();
413 + } else {
414 + // Uses dequeueInputBuffer to feed the encoder
415 + encodeWithMediaCodecMethod1();
416 + }
417 + }
418 +
	/**
	 * Video encoding is done by a MediaCodec.
	 * Preview frames (NV21) are converted to the encoder's expected color format
	 * and pushed into the codec through its input buffers.
	 */
	@SuppressLint("NewApi")
	protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {

		Log.d(TAG,"Video encoded using the MediaCodec API with a buffer");

		// Updates the parameters of the camera if needed
		createCamera();
		updateCamera();

		// Estimates the frame rate of the camera
		measureFramerate();

		// Starts the preview if needed
		if (!mPreviewStarted) {
			try {
				mCamera.startPreview();
				mPreviewStarted = true;
			} catch (RuntimeException e) {
				destroyCamera();
				throw e;
			}
		}

		// EncoderDebugger selects a suitable encoder and color format for this resolution
		EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
		final NV21Convertor convertor = debugger.getNV21Convertor();

		mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
		MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
		mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
		mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
		mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,debugger.getEncoderColorFormat());
		mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
		mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
		mMediaCodec.start();

		// Converts each preview frame and queues it into the encoder
		Camera.PreviewCallback callback = new Camera.PreviewCallback() {
			long now = System.nanoTime()/1000, oldnow = now, i=0;
			ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
			@Override
			public void onPreviewFrame(byte[] data, Camera camera) {

				oldnow = now;
				now = System.nanoTime()/1000;
				if (i++>3) {
					i = 0;
					//Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
				}
				try {
					// Wait up to 500 ms for a free encoder input buffer
					int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
					if (bufferIndex>=0) {
						inputBuffers[bufferIndex].clear();
						if (data == null) Log.e(TAG,"Symptom of the \"Callback buffer was to small\" problem...");
						else convertor.convert(data, inputBuffers[bufferIndex]);
						// 'now' (microseconds) is used as the presentation timestamp
						mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
					} else {
						Log.e(TAG,"No buffer available !");
					}
				} finally {
					// Always hand the buffer back to the camera so the preview keeps flowing
					mCamera.addCallbackBuffer(data);
				}
			}
		};

		// Pre-allocate callback buffers, each sized for one raw preview frame
		for (int i=0;i<10;i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
		mCamera.setPreviewCallbackWithBuffer(callback);

		// The packetizer encapsulates the bit stream in an RTP stream and send it over the network
		mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
		mPacketizer.start();

		mStreaming = true;

	}
495 +
	/**
	 * Video encoding is done by a MediaCodec.
	 * But here we will use the buffer-to-surface method:
	 * the camera preview is rendered into the encoder's input surface.
	 */
	@SuppressLint({ "InlinedApi", "NewApi" })
	protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {

		Log.d(TAG,"Video encoded using the MediaCodec API with a surface");

		// Updates the parameters of the camera if needed
		createCamera();
		updateCamera();

		// Estimates the frame rate of the camera
		measureFramerate();

		// EncoderDebugger selects a suitable encoder for this resolution
		EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);

		mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
		MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
		mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
		mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
		mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
		mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
		mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
		// The surface must be created after configure() and attached before start()
		Surface surface = mMediaCodec.createInputSurface();
		((SurfaceView)mSurfaceView).addMediaCodecSurface(surface);
		mMediaCodec.start();

		// The packetizer encapsulates the bit stream in an RTP stream and send it over the network
		mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
		mPacketizer.start();

		mStreaming = true;

	}
532 +
	/**
	 * Returns a description of the stream using SDP.
	 * This method can only be called after {@link Stream#configure()}.
	 * @throws IllegalStateException Thrown when {@link Stream#configure()} was not called.
	 */
	public abstract String getSessionDescription() throws IllegalStateException;
539 +
	/**
	 * Opens the camera in a new Looper thread so that the preview callback is not called from the main thread
	 * If an exception is thrown in this Looper thread, we bring it back into the main thread.
	 * @throws RuntimeException Might happen if another app is already using the camera.
	 */
	private void openCamera() throws RuntimeException {
		final Semaphore lock = new Semaphore(0);
		final RuntimeException[] exception = new RuntimeException[1];
		mCameraThread = new Thread(new Runnable() {
			@Override
			public void run() {
				// A Looper is needed so Camera callbacks are delivered on this thread
				Looper.prepare();
				mCameraLooper = Looper.myLooper();
				try {
					mCamera = Camera.open(mCameraId);
				} catch (RuntimeException e) {
					// Ferry the failure back to the calling thread
					exception[0] = e;
				} finally {
					// Unblock the caller before entering the message loop
					lock.release();
					Looper.loop();
				}
			}
		});
		mCameraThread.start();
		// Block until the camera has been opened (or has failed to open)
		lock.acquireUninterruptibly();
		if (exception[0] != null) throw new CameraInUseException(exception[0].getMessage());
	}
567 +
	/**
	 * Opens the camera (if not already open), installs an error callback and
	 * binds the preview to the surface.
	 * @throws RuntimeException if the surface is invalid or the camera cannot be configured.
	 */
	protected synchronized void createCamera() throws RuntimeException {
		if (mSurfaceView == null)
			throw new InvalidSurfaceException("Invalid surface !");
		if (mSurfaceView.getHolder() == null || !mSurfaceReady)
			throw new InvalidSurfaceException("Invalid surface !");

		if (mCamera == null) {
			openCamera();
			mUpdated = false;
			mUnlocked = false;
			mCamera.setErrorCallback(new Camera.ErrorCallback() {
				@Override
				public void onError(int error, Camera camera) {
					// On some phones when trying to use the camera facing front the media server will die
					// Whether or not this callback may be called really depends on the phone
					if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
						// In this case the application must release the camera and instantiate a new one
						Log.e(TAG,"Media server died !");
						// We don't know in what thread we are so stop needs to be synchronized
						mCameraOpenedManually = false;
						stop();
					} else {
						Log.e(TAG,"Error unknown with the camera: "+error);
					}
				}
			});

			try {

				// If the phone has a flash, we turn it on/off according to mFlashEnabled
				// setRecordingHint(true) is a very nice optimization if you plane to only use the Camera for recording
				Parameters parameters = mCamera.getParameters();
				if (parameters.getFlashMode()!=null) {
					parameters.setFlashMode(mFlashEnabled?Parameters.FLASH_MODE_TORCH:Parameters.FLASH_MODE_OFF);
				}
				parameters.setRecordingHint(true);
				mCamera.setParameters(parameters);
				mCamera.setDisplayOrientation(mOrientation);

				try {
					if (mMode == MODE_MEDIACODEC_API_2) {
						// The preview is rendered through a SurfaceTexture handled by the GL thread
						mSurfaceView.startGLThread();
						mCamera.setPreviewTexture(mSurfaceView.getSurfaceTexture());
					} else {
						mCamera.setPreviewDisplay(mSurfaceView.getHolder());
					}
				} catch (IOException e) {
					throw new InvalidSurfaceException("Invalid surface !");
				}

			} catch (RuntimeException e) {
				// Leave no half-configured camera behind
				destroyCamera();
				throw e;
			}

		}
	}
625 +
	/** Releases the camera and quits its Looper thread. Safe to call when no camera is open. */
	protected synchronized void destroyCamera() {
		if (mCamera != null) {
			// If a stream is running on this camera, stop it first
			if (mStreaming) super.stop();
			lockCamera();
			mCamera.stopPreview();
			try {
				mCamera.release();
			} catch (Exception e) {
				Log.e(TAG,e.getMessage()!=null?e.getMessage():"unknown error");
			}
			mCamera = null;
			// Terminates the Looper thread started by openCamera()
			mCameraLooper.quit();
			mUnlocked = false;
			mPreviewStarted = false;
		}
	}
642 +
	/**
	 * Applies the requested quality to the camera and (re)starts the preview.
	 * Does nothing if the camera is already configured ({@code mUpdated}).
	 * @throws RuntimeException if the camera rejects the parameters.
	 */
	protected synchronized void updateCamera() throws RuntimeException {

		// The camera is already correctly configured
		if (mUpdated) return;

		if (mPreviewStarted) {
			mPreviewStarted = false;
			mCamera.stopPreview();
		}

		// Snap the requested resolution/framerate to the closest supported values
		Parameters parameters = mCamera.getParameters();
		mQuality = VideoQuality.determineClosestSupportedResolution(parameters, mQuality);
		int[] max = VideoQuality.determineMaximumSupportedFramerate(parameters);

		double ratio = (double)mQuality.resX/(double)mQuality.resY;
		mSurfaceView.requestAspectRatio(ratio);

		parameters.setPreviewFormat(mCameraImageFormat);
		parameters.setPreviewSize(mQuality.resX, mQuality.resY);
		parameters.setPreviewFpsRange(max[0], max[1]);

		try {
			mCamera.setParameters(parameters);
			mCamera.setDisplayOrientation(mOrientation);
			mCamera.startPreview();
			mPreviewStarted = true;
			mUpdated = true;
		} catch (RuntimeException e) {
			destroyCamera();
			throw e;
		}
	}
675 +
676 + protected void lockCamera() {
677 + if (mUnlocked) {
678 + Log.d(TAG,"Locking camera");
679 + try {
680 + mCamera.reconnect();
681 + } catch (Exception e) {
682 + Log.e(TAG,e.getMessage());
683 + }
684 + mUnlocked = false;
685 + }
686 + }
687 +
688 + protected void unlockCamera() {
689 + if (!mUnlocked) {
690 + Log.d(TAG,"Unlocking camera");
691 + try {
692 + mCamera.unlock();
693 + } catch (Exception e) {
694 + Log.e(TAG,e.getMessage());
695 + }
696 + mUnlocked = true;
697 + }
698 + }
699 +
	/**
	 * Computes the average frame rate at which the preview callback is called.
	 * We will then use this average frame rate with the MediaCodec.
	 * Blocks the thread in which this function is called (for up to ~2 seconds).
	 */
	private void measureFramerate() {
		final Semaphore lock = new Semaphore(0);

		final Camera.PreviewCallback callback = new Camera.PreviewCallback() {
			int i = 0, t = 0;
			long now, oldnow, count = 0;
			@Override
			public void onPreviewFrame(byte[] data, Camera camera) {
				i++;
				now = System.nanoTime()/1000;
				// Skip the first frames: inter-frame times are not representative yet
				if (i>3) {
					t += now - oldnow;
					count++;
				}
				// After ~17 measured intervals, derive the average FPS and release the caller
				if (i>20) {
					mQuality.framerate = (int) (1000000/(t/count)+1);
					lock.release();
				}
				oldnow = now;
			}
		};

		mCamera.setPreviewCallback(callback);

		try {
			// Wait at most 2 seconds for the measurement; keep the old value on timeout
			lock.tryAcquire(2,TimeUnit.SECONDS);
			Log.d(TAG,"Actual framerate: "+mQuality.framerate);
			if (mSettings != null) {
				// Cache the measured FPS for this (fps, image format, resolution) combination
				Editor editor = mSettings.edit();
				editor.putInt(PREF_PREFIX+"fps"+mRequestedQuality.framerate+","+mCameraImageFormat+","+mRequestedQuality.resX+mRequestedQuality.resY, mQuality.framerate);
				editor.commit();
			}
		} catch (InterruptedException e) {}

		mCamera.setPreviewCallback(null);

	}
743 +
744 +}
1 +## This file is automatically generated by Android Studio.
2 +# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 +#
4 +# This file should *NOT* be checked into Version Control Systems,
5 +# as it contains information specific to your local configuration.
6 +#
7 +# Location of the SDK. This is only used by Gradle.
8 +# For customization when using a Version Control System, please read the
9 +# header note.
10 +sdk.dir=/Users/gwonjoohee/Library/Android/sdk
...\ No newline at end of file ...\ No newline at end of file
1 +rootProject.name='CCTedV'
2 +include ':app'
3 +include ':libstreaming'
1 +# CCTedV
2 +- 알림용 어플리케이션
3 +- 다운받은 후에 android studio로 코드 실행 가능
4 +
5 +# whatsUP
6 +- 알림용 어플리케이션
7 +- 다운받은 후에 android studio로 코드 실행 가능
8 +
9 +# file_server
10 +Server that receives frame data from the Android client.
11 +cd file_server
12 +- python3 -m venv env // 가상환경 설치
13 +- source env/bin/activate //가상환경 시작
14 +- pip install django
15 +- pip install djangorestframework
16 +
17 +- python manage.py runserver 0.0.0.0:5900 //5900 포트로 장고 프로젝트 시작
No preview for this file type
1 +from django.contrib import admin
2 +
3 +# Register your models here.
4 +from .models import Notification
5 +admin.site.register(Notification)
...\ No newline at end of file ...\ No newline at end of file
1 +from django.apps import AppConfig
2 +
3 +
class DetectionConfig(AppConfig):
    # App config for the "detection" app; referenced from INSTALLED_APPS
    # as 'detection.apps.DetectionConfig' in file_server/settings.py.
    name = 'detection'
1 +# Generated by Django 3.0.6 on 2020-06-13 14:33
2 +
3 +from django.db import migrations, models
4 +
5 +
class Migration(migrations.Migration):
    # Auto-generated by `makemigrations`: creates the detection.Notification table.
    # Applied migrations must not be edited -- Django tracks them by history.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('created_at', models.DateTimeField(auto_created=True)),
                ('noti_id', models.AutoField(primary_key=True, serialize=False)),
                ('noti_type', models.CharField(max_length=50)),
                ('user', models.CharField(max_length=1000)),
            ],
        ),
    ]
1 +from django.db import models
2 +
3 +# Create your models here.
class Notification(models.Model):
    """A record of a detection notification targeted at a user."""
    noti_id = models.AutoField(primary_key=True)  # surrogate primary key
    noti_type = models.CharField(max_length=50)  # detection type, e.g. "fire_broken" / "unknown_person" (see detection.views)
    user = models.CharField(max_length=1000)  # identifier of the targeted user
    # NOTE(review): auto_created is an internal Django flag, not a timestamp option;
    # this was likely meant to be auto_now_add=True. Changing it requires a new
    # migration -- confirm before touching it.
    created_at = models.DateTimeField(auto_created=True)
1 +from django.test import TestCase
2 +
3 +# Create your tests here.
from django.urls import path

from .views import NotificationView

urlpatterns = [
    # App root; mounted under /notificate by file_server.urls.
    path('', NotificationView.as_view()),
]
...\ No newline at end of file ...\ No newline at end of file
from rest_framework.views import APIView
from django.http import HttpResponse
import requests
import json
# BUG FIX: the original did `from firebase_admin import datetime`, but firebase_admin
# does not provide a datetime module; the code below uses datetime.timedelta, so the
# standard-library module was intended.
import datetime
import firebase_admin
from firebase_admin import credentials
from firebase_admin import messaging
from django.apps import apps

# Resolve the User model lazily through the app registry to avoid importing
# user.models before the registry is ready.
User = apps.get_model('user', 'User')

# Firebase Admin SDK credentials; the path is relative to the process working directory.
cred = credentials.Certificate('./detection/whatsup-ad0b7-firebase-adminsdk-6yhd1-2e4fcd728a.json')
default_app = firebase_admin.initialize_app(cred)
topic = 'detection'
15 +
16 +"""
17 +Notification View는 fire_broken 또는 unknown_person이라는 이벤트 발생 시, 스파크에서 호출하는 api를 가진 view입니다.
18 +post함수를 이용하여 각 user의 알림용 device의 고유 토큰을 조회하여 알림을 보낼 수 있습니다.
19 +"""
class NotificationView(APIView):
    """Endpoint called by the Spark pipeline when a "fire_broken" or
    "unknown_person" event occurs; pushes an FCM notification to the device
    token registered for the given user.

    POST parameters: timestamp, userId, detectionType.
    """
    def post(self, request, *args, **kwargs):
        # NOTE(review): APIView only dispatches POST requests to post(), so this
        # check is always true and the failure branch below is unreachable.
        if request.method == 'POST':
            # NOTE(review): the default of False means a missing 'timestamp'
            # would raise TypeError on the string concatenations below --
            # confirm the caller always supplies all three fields.
            timestamp = request.POST.get('timestamp', False)
            userId = request.POST.get('userId', False)
            detectionType = request.POST.get('detectionType',False)
            print("USER : ", userId)
            print("timestamp : ", timestamp)
            print("detectionType : ", detectionType)

            # Raises User.DoesNotExist (-> 500) for an unknown userId.
            user = User.objects.get(userId=userId)

            # Build the (Korean) notification body for the detected event type.
            bodyContent = ""
            if detectionType == "fire_broken":
                bodyContent = "불났어요 불났어요!! " + timestamp
            elif detectionType == "unknown_person":
                bodyContent = "침입자 발생!! " + timestamp

            message = messaging.Message(
                android=messaging.AndroidConfig(
                    # Notification is dropped if undeliverable within one hour.
                    ttl=datetime.timedelta(seconds=3600),
                    priority='normal',
                    notification=messaging.AndroidNotification(
                        title='삐뽀삐뽀',
                        body = bodyContent,
                        icon='',
                        color='#f45342',
                        sound='default'
                    ),
                ),
                data={
                    'timestamp': timestamp
                },
                webpush=messaging.WebpushConfig(
                    notification=messaging.WebpushNotification(
                        title='웹 알림',
                        body='여긴 어떨까',
                        icon='',
                    ),
                ),
                # topic=topic
                # Send directly to the device token stored for this user.
                token=user.userToken
            )

            response = messaging.send(message)
            # Response is a message ID string.
            print('Successfully sent message:', response)

            return HttpResponse('notification_success')

        return HttpResponse('/notification_failure')
71 +
1 +from django.contrib import admin
2 +from .models import File
3 +# Register your models here.
4 +admin.site.register(File)
...\ No newline at end of file ...\ No newline at end of file
1 +from django.apps import AppConfig
2 +
3 +
class FileConfig(AppConfig):
    # App config for the "file" app; referenced from INSTALLED_APPS
    # as 'file.apps.FileConfig' in file_server/settings.py.
    name = 'file'
1 +from django import forms
2 +
3 +from .models import File
4 +
class UploadFileForm(forms.ModelForm):
    """Form for File uploads; upload_date is set automatically (auto_now) and is not exposed."""
    class Meta:
        model = File
        fields = ('userName', 'file')
1 +# Generated by Django 3.0.6 on 2020-05-08 08:19
2 +
3 +from django.db import migrations, models
4 +
5 +
class Migration(migrations.Migration):
    # Auto-generated: creates the initial UploadFileModel table
    # (replaced by the File model in migration 0002). Do not edit.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='UploadFileModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fileName', models.TextField(default='')),
                ('file', models.FileField(null=True, upload_to='')),
            ],
        ),
    ]
1 +# Generated by Django 3.0.6 on 2020-05-08 08:44
2 +
3 +from django.db import migrations, models
4 +
5 +
class Migration(migrations.Migration):
    # Auto-generated: introduces the File model and drops UploadFileModel. Do not edit.

    dependencies = [
        ('file', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='File',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.FileField(upload_to='')),
                ('name', models.CharField(max_length=100)),
                ('version', models.IntegerField(default=0)),
                ('upload_date', models.DateTimeField(auto_now=True, db_index=True)),
            ],
        ),
        migrations.DeleteModel(
            name='UploadFileModel',
        ),
    ]
1 +# Generated by Django 3.0.6 on 2020-05-08 09:38
2 +
3 +from django.db import migrations
4 +
5 +
class Migration(migrations.Migration):
    # Auto-generated: renames File.name to File.userName and removes the
    # unused version field. Do not edit.

    dependencies = [
        ('file', '0002_auto_20200508_0844'),
    ]

    operations = [
        migrations.RenameField(
            model_name='file',
            old_name='name',
            new_name='userName',
        ),
        migrations.RemoveField(
            model_name='file',
            name='version',
        ),
    ]
1 +# Generated by Django 3.0.6 on 2020-05-08 10:05
2 +
3 +from django.db import migrations, models
4 +
5 +
class Migration(migrations.Migration):
    # NOTE(review): upload_to here was mistakenly given a CharField *instance*
    # instead of a path or callable; this was corrected in migration 0006.
    # Kept as-is because applied migration history must not be rewritten.

    dependencies = [
        ('file', '0003_auto_20200508_0938'),
    ]

    operations = [
        migrations.AlterField(
            model_name='file',
            name='file',
            field=models.FileField(upload_to=models.CharField(max_length=100)),
        ),
    ]
1 +# Generated by Django 3.0.6 on 2020-05-08 10:09
2 +
3 +from django.db import migrations, models
4 +
5 +
class Migration(migrations.Migration):
    # NOTE(review): upload_to here is a *set* containing a CharField instance --
    # still not a valid upload_to value; corrected in migration 0006.
    # Kept as-is because applied migration history must not be rewritten.

    dependencies = [
        ('file', '0004_auto_20200508_1005'),
    ]

    operations = [
        migrations.AlterField(
            model_name='file',
            name='file',
            field=models.FileField(upload_to={models.CharField(max_length=100)}),
        ),
    ]
1 +# Generated by Django 3.0.6 on 2020-05-08 10:13
2 +
3 +from django.db import migrations, models
4 +import file.models
5 +
6 +
class Migration(migrations.Migration):
    # Auto-generated: points upload_to at File.user_directory_path -- the
    # current, correct setting (fixes migrations 0004/0005). Do not edit.

    dependencies = [
        ('file', '0005_auto_20200508_1009'),
    ]

    operations = [
        migrations.AlterField(
            model_name='file',
            name='file',
            field=models.FileField(upload_to=file.models.File.user_directory_path),
        ),
    ]
1 +from django.db import models
2 +
3 +
class File(models.Model):
    """A client-uploaded file, stored under a per-user directory in MEDIA_ROOT."""

    def user_directory_path(instance, file):
        # upload_to callback: files land at MEDIA_ROOT/<userName>/<original filename>
        print("INSTANCE" , instance)
        return '{0}/{1}'.format(instance.userName,file)

    userName = models.CharField(max_length=100)  # owner identifier; doubles as the directory name
    upload_date = models.DateTimeField(auto_now=True, db_index=True)  # refreshed on every save()
    file = models.FileField(upload_to=user_directory_path)

    def __str__(self):
        return self.userName
...\ No newline at end of file ...\ No newline at end of file
1 +import os
2 +from .models import File
3 +from rest_framework import serializers
4 +
5 +
class FileUploaderSerializer(serializers.ModelSerializer):
    """Serializer for File uploads; derives userName from the uploaded filename."""

    class Meta:
        model = File
        fields = '__all__'

    def validate(self, validated_data):
        # Derive the owner name from the uploaded file's name (without extension).
        # BUG FIX: the original wrote to 'name', but the model field was renamed
        # to 'userName' in migration 0003, so the value never reached the model.
        validated_data['userName'] = os.path.splitext(validated_data['file'].name)[0]
        return validated_data

    def create(self, validated_data):
        # BUG FIX: the original called File.objects.create() with no arguments,
        # creating an empty row and discarding the uploaded data entirely.
        return File.objects.create(**validated_data)
...\ No newline at end of file ...\ No newline at end of file
1 +from django.test import TestCase
2 +
3 +# Create your tests here.
from django.urls import path

from .views import FileUploadView

# App root handles uploads; mounted under /upload by file_server.urls.
# (A commented-out DefaultRouter setup previously lived here; removed as dead code.)
urlpatterns = [
    path('', FileUploadView.as_view()),
]
...\ No newline at end of file ...\ No newline at end of file
1 +from rest_framework.parsers import FileUploadParser
2 +from rest_framework.views import APIView
3 +from .forms import UploadFileForm
4 +from django.http import HttpResponse
5 +import base64
6 +import json
7 +import os
8 +from kafka import KafkaProducer
9 +from kafka.errors import KafkaError
10 +
11 +"""
12 +File Upload View는 전송받은 프레임 데이터를 카프카에 프로듀스하는 클래스입니다.
13 +post api를 안드로이드에서 호출하면 해당 데이터를 jsong Object를 str으로 dumps하여 프로듀스 진행합니다.
14 +"""
class FileUploadView(APIView):
    """Receives a frame payload from the Android client, persists it as a JSON
    file under media/<userId>/, and produces it to Kafka.

    POST parameters: befEncoding (encoded frame data), userId, timeStamp.
    Returns 'save_success' on success, '/upload_failure' otherwise.
    """
    parser_class = (FileUploadParser,)

    # Class-level flag historically used to alternate between two Kafka topics
    # (see the commented-out branch in the repository history); kept for compatibility.
    tog = True

    def post(self, request, *args, **kwargs):
        if request.method != 'POST':
            # Unreachable in practice: APIView only routes POST here. Kept as a safety net.
            return HttpResponse('/upload_failure')

        print("---- data in ----")
        befEncoding = request.POST['befEncoding']
        userId = request.POST['userId']
        timeStamp = request.POST['timeStamp']

        dict_data = {'data': befEncoding, 'userId': userId, 'timestamp': timeStamp}

        # Keep a local JSON copy of every frame under media/<userId>/.
        if not os.path.exists('./media/' + userId):
            os.makedirs('./media/' + userId)
        with open("media/" + userId + "/" + userId + "_" + timeStamp + ".json", "w") as out:
            json.dump(dict_data, out, indent="\t")

        producer = KafkaProducer(bootstrap_servers=['1.201.142.81:9092'], max_request_size=209717600)
        jsonObject = json.dumps(dict_data).encode('utf-8')
        # BUG FIX: the original declared `global tog`, but no module-level `tog`
        # exists -- the class attribute is what is actually used.
        FileUploadView.tog = self.toggle(FileUploadView.tog)

        future = producer.send('test4', jsonObject)
        try:
            record_metadata = future.get(timeout=10)
        except KafkaError as err:
            # Produce failed; log and carry on -- the frame was still saved locally.
            print(err)
        else:
            # BUG FIX: the original printed record_metadata unconditionally,
            # raising NameError (-> HTTP 500) whenever the produce failed.
            print("TOPIC : ", record_metadata.topic)
            print("Partition :", record_metadata.partition)
            print("Offset : ", record_metadata.offset)
        finally:
            # Flush and release the per-request producer (the original leaked it).
            producer.close()

        print("---- process exit ----")
        return HttpResponse('save_success')

    def toggle(self, tog):
        # Flip the alternating-topic flag.
        return not tog
1 +"""
2 +Django settings for file_server project.
3 +
4 +Generated by 'django-admin startproject' using Django 2.2.4.
5 +
6 +For more information on this file, see
7 +https://docs.djangoproject.com/en/2.2/topics/settings/
8 +
9 +For the full list of settings and their values, see
10 +https://docs.djangoproject.com/en/2.2/ref/settings/
11 +"""
12 +
13 +import os
14 +
15 +# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
16 +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
17 +
18 +
19 +# Quick-start development settings - unsuitable for production
20 +# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
21 +
22 +# SECURITY WARNING: keep the secret key used in production secret!
23 +SECRET_KEY = 'y-yt*2es6821o$-5*d01epjpv2jb^^h@uo58or!=%0ijepzaww'
24 +
25 +# SECURITY WARNING: don't run with debug turned on in production!
26 +DEBUG = True
27 +
28 +ALLOWED_HOSTS = ['victoria.khunet.net', 'localhost', '127.0.0.1', '1.201.143.22']
29 +
30 +DATA_UPLOAD_MAX_MEMORY_SIZE = 30000000
31 +FILE_UPLOAD_MAX_MEMORY_SIZE = 30000000
32 +
33 +# Application definition
34 +
35 +INSTALLED_APPS = [
36 + 'django.contrib.admin',
37 + 'django.contrib.auth',
38 + 'django.contrib.contenttypes',
39 + 'django.contrib.sessions',
40 + 'django.contrib.messages',
41 + 'django.contrib.staticfiles',
42 + 'rest_framework',
43 + 'file.apps.FileConfig',
44 + 'detection.apps.DetectionConfig',
45 + 'user.apps.UserConfig',
46 + 'corsheaders',
47 +]
48 +
# BUG FIX: django-cors-headers requires CorsMiddleware to be placed as high as
# possible -- before any middleware that can generate responses (such as
# CommonMiddleware) -- otherwise CORS headers are missing from those responses.
# The original listed it after CsrfViewMiddleware and also duplicated
# CommonMiddleware; the duplicate is removed here.
MIDDLEWARE = [
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
60 +
61 +ROOT_URLCONF = 'file_server.urls'
62 +
63 +TEMPLATES = [
64 + {
65 + 'BACKEND': 'django.template.backends.django.DjangoTemplates',
66 + 'DIRS': [],
67 + 'APP_DIRS': True,
68 + 'OPTIONS': {
69 + 'context_processors': [
70 + 'django.template.context_processors.debug',
71 + 'django.template.context_processors.request',
72 + 'django.contrib.auth.context_processors.auth',
73 + 'django.contrib.messages.context_processors.messages',
74 + ],
75 + },
76 + },
77 +]
78 +
79 +WSGI_APPLICATION = 'file_server.wsgi.application'
80 +
81 +
82 +# Database
83 +# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
84 +
85 +DATABASES = {
86 + 'default': {
87 + 'ENGINE': 'django.db.backends.sqlite3',
88 + 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
89 + }
90 +}
91 +
92 +
93 +# Password validation
94 +# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
95 +
96 +AUTH_PASSWORD_VALIDATORS = [
97 + {
98 + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
99 + },
100 + {
101 + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
102 + },
103 + {
104 + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
105 + },
106 + {
107 + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
108 + },
109 +]
110 +
111 +
112 +# Internationalization
113 +# https://docs.djangoproject.com/en/2.2/topics/i18n/
114 +
115 +LANGUAGE_CODE = 'en-us'
116 +
117 +TIME_ZONE = 'UTC'
118 +
119 +USE_I18N = True
120 +
121 +USE_L10N = True
122 +
123 +USE_TZ = True
124 +
125 +
126 +# Static files (CSS, JavaScript, Images)
127 +# https://docs.djangoproject.com/en/2.2/howto/static-files/
128 +MEDIA_URL = '/media/'
129 +MEDIA_ROOT = os.path.join(BASE_DIR, "media")
130 +
131 +STATIC_URL = '/static/'
132 +
133 +CORS_ALLOW_METHODS = [
134 + 'DELETE',
135 + 'GET',
136 + 'OPTIONS',
137 + 'PATCH',
138 + 'POST',
139 + 'PUT',
140 +]
141 +CORS_ORIGIN_ALLOW_ALL = True # If this is used then `CORS_ORIGIN_WHITELIST` will not have any effect
142 +CORS_ALLOW_CREDENTIALS = True
...\ No newline at end of file ...\ No newline at end of file
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static

# Project-level routing: one URL prefix per app.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('upload', include('file.urls')),  # frame upload endpoint (file app)
    path('notificate', include('detection.urls')),  # push-notification trigger (detection app)
    path('user', include('user.urls')),  # user app endpoints

]
# Serve uploaded media through Django itself -- development only.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
...\ No newline at end of file ...\ No newline at end of file
1 +"""
2 +WSGI config for file_server project.
3 +
4 +It exposes the WSGI callable as a module-level variable named ``application``.
5 +
6 +For more information on this file, see
7 +https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
8 +"""
9 +
import os

from django.core.wsgi import get_wsgi_application

# Point Django at this project's settings before building the application.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'file_server.settings')

# Module-level WSGI callable picked up by servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
1 +#!/usr/bin/env python
2 +"""Django's command-line utility for administrative tasks."""
3 +import os
4 +import sys
5 +
6 +
def main():
    """Entry point for Django's command-line administration tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'file_server.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as import_error:
        # Re-raise with a friendlier hint while preserving the original cause.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from import_error
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
This diff could not be displayed because it is too large.
This diff could not be displayed because it is too large.
1 +{
2 + "data": "bef",
3 + "userId": "victoria",
4 + "timestamp": "20200505"
5 +}
...\ No newline at end of file ...\ No newline at end of file
from django.contrib import admin

from .models import User
# Make the User (id -> device token) model manageable in the Django admin.
admin.site.register(User)
...\ No newline at end of file ...\ No newline at end of file
1 +from django.apps import AppConfig
2 +
3 +
class UserConfig(AppConfig):
    """Django app configuration for the ``user`` app."""
    name = 'user'
1 +# Generated by Django 3.0.6 on 2020-06-02 07:33
2 +
3 +from django.db import migrations, models
4 +
5 +
class Migration(migrations.Migration):
    """Auto-generated initial migration.

    Creates the original ``File`` table; migration 0002 later replaces it
    with ``User``. Generated code — do not hand-edit beyond comments.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='File',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('userId', models.CharField(max_length=100)),
                ('userToken', models.CharField(max_length=500)),
            ],
        ),
    ]
1 +# Generated by Django 3.0.6 on 2020-06-02 07:44
2 +
3 +from django.db import migrations, models
4 +
5 +
class Migration(migrations.Migration):
    """Auto-generated migration replacing ``File`` with ``User``.

    ``User`` is keyed by ``userId`` and stores the device token used for
    notifications. Generated code — do not hand-edit beyond comments.
    """

    dependencies = [
        ('user', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('userId', models.CharField(max_length=100, primary_key=True, serialize=False)),
                ('userToken', models.CharField(max_length=500)),
            ],
        ),
        migrations.DeleteModel(
            name='File',
        ),
    ]
1 +from django.db import models
2 +
class User(models.Model):
    """Maps a user id to the device token used for push notifications."""
    # Primary-key user identifier. (The original inline comment about
    # "filename without extension" was stale, copied from the old File model.)
    userId = models.CharField(max_length=100, primary_key=True)
    # Notification-device token; set/updated via UserView.
    userToken = models.CharField(max_length=500)
1 +from django.test import TestCase
2 +
3 +# Create your tests here.
1 +from django.urls import path
2 +from .views import *
3 +
# All requests routed to this app go to UserView (id/token enrollment).
urlpatterns = [
    path('', UserView.as_view()),

]
...\ No newline at end of file ...\ No newline at end of file
1 +from django.shortcuts import render
2 +from rest_framework.views import APIView
3 +from django.http import HttpResponse
4 +from django.db import models
5 +from .models import *
6 +from annoying.functions import get_object_or_None
7 +
8 +# Create your views here.
9 +"""
10 +User View는 사용자별 id와 그 사용자의 알림용 device의 고유 토큰을 세팅할 수 있는 view입니다.
11 +"""
class UserView(APIView):
    """Enroll a user id and/or update that user's notification-device token.

    POST parameters:
        userId    -- unique user identifier (primary key on ``User``).
        userToken -- device token for push notifications; may be empty.

    Responses are plain-text status strings, kept identical to the original
    contract so existing clients keep working.
    """

    def post(self, request, *args, **kwargs):
        # APIView already dispatches by HTTP method, so the original
        # ``request.method == 'POST'`` check was redundant.
        userId = request.POST.get('userId', '')
        userToken = request.POST.get('userToken', '')
        if userId == '':
            # Missing id previously raised a 500 via request.POST['userId'];
            # report an explicit failure instead.
            return HttpResponse('/user_enroll_failure')

        user = get_object_or_None(User, userId=userId)
        if user is not None:
            # Existing user: only update the token when one was supplied.
            if userToken != '':
                user.userToken = userToken
                user.save()
                return HttpResponse('token_enroll_success')
            return HttpResponse('/user_enroll_failure')

        # New user. Bug fixes vs. the original:
        #  - the token sent with the first enrollment was silently dropped;
        #  - HttpResponse('userId_enroll_success', user) passed the model
        #    instance as the content_type argument.
        user = User(userId=userId, userToken=userToken)
        user.save()
        return HttpResponse('userId_enroll_success')
...\ No newline at end of file ...\ No newline at end of file
1 +# whatsUp
 whatsUp is the push-notification (alarm) companion app for CCTedV.
// Build script for the whatsUp notification (alarm) Android app.
apply plugin: 'com.android.application'

android {
    compileSdkVersion 28


    defaultConfig {
        applicationId "com.example.whatsup"
        minSdkVersion 22
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"

        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'androidx.appcompat:appcompat:1.1.0'
    implementation 'com.google.android.material:material:1.1.0'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    implementation 'androidx.navigation:navigation-fragment:2.0.0'
    implementation 'androidx.navigation:navigation-ui:2.0.0'
    implementation 'androidx.lifecycle:lifecycle-extensions:2.0.0'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
    // Firebase push-notification stack (requires google-services.json).
    implementation 'com.google.firebase:firebase-core:17.0.0'
    implementation 'com.google.firebase:firebase-messaging:20.0.0'
    implementation("com.squareup.okhttp3:okhttp:4.6.0")



}
// Applied last so the google-services plugin can process google-services.json
// after the dependency configuration is complete.
apply plugin: 'com.google.gms.google-services'

1 +# Add project specific ProGuard rules here.
2 +# You can control the set of applied configuration files using the
3 +# proguardFiles setting in build.gradle.
4 +#
5 +# For more details, see
6 +# http://developer.android.com/guide/developing/tools/proguard.html
7 +
8 +# If your project uses WebView with JS, uncomment the following
9 +# and specify the fully qualified class name to the JavaScript interface
10 +# class:
11 +#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 +# public *;
13 +#}
14 +
15 +# Uncomment this to preserve the line number information for
16 +# debugging stack traces.
17 +#-keepattributes SourceFile,LineNumberTable
18 +
19 +# If you keep the line number information, uncomment this to
20 +# hide the original source file name.
21 +#-renamesourcefileattribute SourceFile
1 +package com.example.whatsup;
2 +
3 +import android.content.Context;
4 +
5 +import androidx.test.platform.app.InstrumentationRegistry;
6 +import androidx.test.ext.junit.runners.AndroidJUnit4;
7 +
8 +import org.junit.Test;
9 +import org.junit.runner.RunWith;
10 +
11 +import static org.junit.Assert.*;
12 +
13 +/**
14 + * Instrumented test, which will execute on an Android device.
15 + *
16 + * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
17 + */
18 +@RunWith(AndroidJUnit4.class)
19 +public class ExampleInstrumentedTest {
20 + @Test
21 + public void useAppContext() {
22 + // Context of the app under test.
23 + Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
24 +
25 + assertEquals("com.example.whatsup", appContext.getPackageName());
26 + }
27 +}
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.whatsup">

    <!-- Required to register the device token with the backend over HTTP. -->
    <uses-permission android:name="android.permission.INTERNET" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme"
        android:usesCleartextTraffic="true">

        <activity
            android:name=".MainActivity"
            android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>

        <!-- Receives FCM messages and token refresh events. -->
        <service
            android:name=".MyFirebaseMessagingService"
            android:enabled="true"
            android:exported="false">
            <intent-filter>
                <action android:name="com.google.firebase.MESSAGING_EVENT" />
                <action android:name="com.google.firebase.INSTANCE_ID_EVENT" />
            </intent-filter>
        </service>

        <!-- BUG FIX: these Firebase defaults were previously declared as direct
             children of <manifest>, where Android ignores <meta-data>; they must
             be inside <application> to take effect.
             Set custom default icon. This is used when no icon is set for incoming
             notification messages. See README(https://goo.gl/l4GJaQ) for more. -->
        <meta-data
            android:name="com.google.firebase.messaging.default_notification_icon"
            android:resource="@drawable/ic_notifications_black_24dp" />

        <meta-data
            android:name="com.google.firebase.messaging.default_notification_color"
            android:resource="@color/colorAccent" />

        <meta-data
            android:name="com.google.firebase.messaging.default_notification_channel_id"
            android:value="@string/default_notification_channel_id" />

    </application>

</manifest>
...\ No newline at end of file ...\ No newline at end of file
1 +package com.example.whatsup;
2 +
3 +import android.os.AsyncTask;
4 +import android.os.Bundle;
5 +import android.util.Log;
6 +import android.view.View;
7 +import android.widget.Button;
8 +import android.widget.EditText;
9 +import android.widget.Toast;
10 +
11 +import com.google.android.gms.tasks.OnCompleteListener;
12 +import com.google.android.gms.tasks.OnSuccessListener;
13 +import com.google.android.material.bottomnavigation.BottomNavigationView;
14 +import com.google.firebase.FirebaseApp;
15 +import com.google.firebase.iid.FirebaseInstanceId;
16 +import com.google.firebase.iid.InstanceIdResult;
17 +
18 +import androidx.annotation.NonNull;
19 +import androidx.appcompat.app.AppCompatActivity;
20 +import androidx.navigation.NavController;
21 +import androidx.navigation.Navigation;
22 +import androidx.navigation.ui.AppBarConfiguration;
23 +import androidx.navigation.ui.NavigationUI;
24 +
25 +public class MainActivity extends AppCompatActivity {
26 + /*
27 + * 이 어플리케이션은 알림용 어플리케이션입니다.
28 + * btn_1을 클릭하면, 해당 디바이스의 고유 토큰이 서버 데이터베이스에 세팅됩니다.
29 + * */
30 + @Override
31 + protected void onCreate(Bundle savedInstanceState) {
32 + super.onCreate(savedInstanceState);
33 + setContentView(R.layout.activity_main);
34 + BottomNavigationView navView = findViewById(R.id.nav_view);
35 + AppBarConfiguration appBarConfiguration = new AppBarConfiguration.Builder(
36 + R.id.navigation_home, R.id.navigation_dashboard, R.id.navigation_notifications)
37 + .build();
38 + NavController navController = Navigation.findNavController(this, R.id.nav_host_fragment);
39 + NavigationUI.setupActionBarWithNavController(this, navController, appBarConfiguration);
40 + NavigationUI.setupWithNavController(navView, navController);
41 + final EditText mEdit;
42 +
43 + FirebaseApp.initializeApp(this);
44 + mEdit = (EditText)findViewById(R.id.userId);
45 +
46 + FirebaseInstanceId.getInstance().getInstanceId().addOnSuccessListener(this,
47 + new OnSuccessListener<InstanceIdResult>() {
48 + @Override
49 + public void onSuccess(InstanceIdResult instanceIdResult) {
50 + String newToken = instanceIdResult.getToken();
51 + Log.d("Token", "새토큰" + newToken );
52 + }
53 + }
54 +
55 + );
56 +
57 + // 버튼을 눌렀을 경우, 저장된 토큰을 가지고 오는 메소드를 설정합니다.
58 + Button btn_1 = findViewById(R.id.button);
59 + btn_1.setOnClickListener(new View.OnClickListener(){
60 + @Override
61 + public void onClick(View v) {
62 + String savedToken = FirebaseInstanceId.getInstance().getToken();
63 + Singleton.getInstance().setUserId(mEdit.getText().toString());
64 + Singleton.getInstance().setUserToken(savedToken);
65 + String url = "http://victoria.khunet.net:5900/user";
66 + final AsyncTask<Void, Void, String> execute = new NetworkTask(url).execute();
67 +
68 + Log.d("Button", "등록되어 있는 토큰ID:" + savedToken);
69 + }
70 + });
71 + }
72 +
73 +}
1 +package com.example.whatsup;
2 +
3 +import android.app.Notification;
4 +import android.app.NotificationChannel;
5 +import android.app.NotificationManager;
6 +import android.app.PendingIntent;
7 +import android.app.Service;
8 +import android.app.TaskStackBuilder;
9 +import android.content.Context;
10 +import android.content.Intent;
11 +import android.os.Build;
12 +import android.os.IBinder;
13 +import android.util.Log;
14 +
15 +import com.google.firebase.messaging.FirebaseMessagingService;
16 +import com.google.firebase.messaging.RemoteMessage;
17 +
18 +import androidx.core.app.NotificationCompat;
19 +
20 +public class MyFirebaseMessagingService extends FirebaseMessagingService {
21 +
22 + private static final String TAG = "FCM";
23 + /*
24 + * 사용자 디바이스에 파이어베이스 노티피케이션을 받는 Service Layer 계층입니다.
25 + * */
26 + public MyFirebaseMessagingService() {
27 +
28 + }
29 +
30 + // 새로운 토큰을 확인했을 때 호출되는 메소드.
31 + @Override
32 + public void onNewToken(String token) {
33 + super.onNewToken(token);
34 + Singleton.getInstance().setUserToken(token);
35 +
36 + // 토큰 정보를 출력합니다.
37 + Log.e(TAG, "onNewToken 호출됨: " + token);
38 +
39 + }
40 +
41 +
42 + // 새로운 메시지를 받았을 때 호출되는 메소드.
43 + @Override
44 + public void onMessageReceived(RemoteMessage remoteMessage) {
45 + super.onMessageReceived(remoteMessage);
46 + Log.i("hello", "??");
47 + // 일단 받은 데이터 중, 내용만 가지고와 출력하는 메소드 입니다. (파이어 베이스 홈페이지에서 보내면 데이터는 값이 없을 수 있습니다.)
48 + String from = remoteMessage.getFrom();
49 + Log.d(TAG,
50 + "title:" + remoteMessage.getNotification().getTitle()
51 + + ", body:" + remoteMessage.getNotification().getBody()
52 + + ", data:" + remoteMessage.getData()
53 + );
54 +
55 + // 액티비티 쪽으로 메시지를 전달하는 메소드를 호출합니다.
56 + sendToActivity(
57 + getApplicationContext()
58 + , remoteMessage.getFrom()
59 + , remoteMessage.getNotification().getTitle()
60 + , remoteMessage.getNotification().getBody()
61 + , remoteMessage.getData().toString()
62 + );
63 +
64 + sendNotification(remoteMessage.getNotification().getTitle().toString(), remoteMessage.getNotification().getBody().toString());
65 +
66 + }
67 +
68 +
69 + // Activity 쪽으로 메소드를 전달하는 메소드 입니다.
70 + private void sendToActivity(Context context, String from, String title, String body, String contents ){
71 +
72 + Intent intent = new Intent(context, ResultActivity.class);
73 + intent.putExtra("from", from);
74 + intent.putExtra("title", title);
75 + intent.putExtra("body", body);
76 + intent.putExtra("contents", contents);
77 +
78 + intent.addFlags(
79 + Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP
80 + );
81 +
82 + Log.i(TAG, contents);
83 + context.startActivity(intent);
84 +
85 +
86 + }
87 +
88 + private void sendNotification(String tit, String body) {
89 +
90 + String title = tit;
91 + String message = body;
92 +
93 + /**
94 + * 오레오 버전부터는 Notification Channel이 없으면 푸시가 생성되지 않는 현상이 있습니다.
95 + * **/
96 + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
97 +
98 + String channel = "What's Up";
99 + String channel_nm = "fire_detection";
100 +
101 + NotificationManager notichannel = (android.app.NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
102 + NotificationChannel channelMessage = new NotificationChannel(channel, channel_nm,
103 + android.app.NotificationManager.IMPORTANCE_DEFAULT);
104 + channelMessage.setDescription("alarm for your cctv");
105 + channelMessage.enableLights(true);
106 + channelMessage.enableVibration(true);
107 + channelMessage.setShowBadge(false);
108 + channelMessage.setVibrationPattern(new long[]{100, 200, 100, 200});
109 + notichannel.createNotificationChannel(channelMessage);
110 +
111 + NotificationCompat.Builder notificationBuilder =
112 + new NotificationCompat.Builder(this, channel)
113 + .setSmallIcon(R.drawable.ic_notifications_black_24dp)
114 + .setContentTitle(title)
115 + .setContentText(message)
116 + .setChannelId(channel)
117 + .setAutoCancel(true)
118 + .setDefaults(Notification.DEFAULT_SOUND | Notification.DEFAULT_VIBRATE);
119 +
120 + NotificationManager notificationManager =
121 + (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
122 +
123 + notificationManager.notify(9999, notificationBuilder.build());
124 +
125 + } else {
126 + Intent resultIntent = new Intent(this, ResultActivity.class);
127 + TaskStackBuilder stackBuilder = TaskStackBuilder.create(this);
128 + stackBuilder.addParentStack( MainActivity.class );
129 + stackBuilder.addNextIntent(resultIntent);
130 + PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_UPDATE_CURRENT);
131 +
132 + NotificationCompat.Builder notificationBuilder =
133 + new NotificationCompat.Builder(this, "")
134 + .setSmallIcon(R.drawable.ic_notifications_black_24dp)
135 + .setContentTitle(title)
136 + .setContentText(message)
137 + .setAutoCancel(true)
138 + .setDefaults(Notification.DEFAULT_SOUND | Notification.DEFAULT_VIBRATE)
139 + .setContentIntent(resultPendingIntent);
140 +
141 +
142 + NotificationManager notificationManager =
143 + (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
144 +
145 + notificationManager.notify(9999, notificationBuilder.build());
146 +
147 + }
148 + }
149 +
150 +}
...\ No newline at end of file ...\ No newline at end of file
1 +package com.example.whatsup;
2 +import android.content.ContentValues;
3 +import android.os.AsyncTask;
4 +import android.util.Log;
5 +
6 +import java.io.File;
7 +import java.io.IOException;
8 +import okhttp3.MultipartBody;
9 +import okhttp3.OkHttpClient;
10 +import okhttp3.Request;
11 +import okhttp3.RequestBody;
12 +import okhttp3.Response;
13 +
14 +public class NetworkTask extends AsyncTask<Void, Void, String> {
15 +
16 + private String url;
17 + /*
18 + * 이 NetworkTask Class를 기반으로 http 통신을 이용하여 api를 호출 할 수 있습니다.
19 + * 이 NetworkTask Class는 사용자 고유 디바이스 토큰정보를 저장할 때 사용됩니다.
20 + * */
21 + public NetworkTask(String url) {
22 + this.url = url;
23 + }
24 +
25 + @Override
26 + protected String doInBackground(Void... params) {
27 + RequestBody requestBody = new MultipartBody.Builder().setType(MultipartBody.FORM)
28 + .addFormDataPart("userId", Singleton.getInstance().getUserId())
29 + .addFormDataPart("userToken", Singleton.getInstance().getUserToken())
30 + .build();
31 +
32 + OkHttpClient client = new OkHttpClient();
33 + Request request = new Request.Builder().url(url).post(requestBody).build();
34 + Response response = null;
35 + try {
36 + response = client.newCall(request).execute();
37 + } catch (IOException e) {
38 + e.printStackTrace();
39 + }
40 + if (response != null)
41 + Log.i("RES", response.toString());
42 + return "hello";
43 + }
44 +
45 + @Override
46 + protected void onPostExecute(String s) {
47 + super.onPostExecute(s);
48 + if(s != null)
49 + Log.i("RESPONSE : ", s);
50 + }
51 +
52 + @Override
53 + protected void onPreExecute() {
54 +
55 + }
56 +}
57 +
1 +package com.example.whatsup;
2 +
public class Singleton {
    /*
     * Process-wide holder for the current user's id and notification-device
     * token, lazily created on the first getInstance() call.
     */
    private static Singleton instance = null;

    private String userId;
    private String userToken;

    // BUG FIX: the implicit public constructor let callers bypass the
    // singleton; a private constructor enforces access via getInstance().
    private Singleton() {
    }

    /** Returns the single shared instance, creating it on first use. */
    public static synchronized Singleton getInstance() {
        if (null == instance) {
            instance = new Singleton();
        }
        return instance;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String data) {
        this.userId = data;
    }

    public String getUserToken() {
        return userToken;
    }

    public void setUserToken(String data) {
        this.userToken = data;
    }
}
1 +package com.example.whatsup.ui.dashboard;
2 +
3 +import android.os.Bundle;
4 +import android.view.LayoutInflater;
5 +import android.view.View;
6 +import android.view.ViewGroup;
7 +import android.widget.TextView;
8 +
9 +import androidx.annotation.NonNull;
10 +import androidx.annotation.Nullable;
11 +import androidx.fragment.app.Fragment;
12 +import androidx.lifecycle.Observer;
13 +import androidx.lifecycle.ViewModelProviders;
14 +
15 +import com.example.whatsup.R;
16 +
17 +public class DashboardFragment extends Fragment {
18 +
19 + private DashboardViewModel dashboardViewModel;
20 +
21 + public View onCreateView(@NonNull LayoutInflater inflater,
22 + ViewGroup container, Bundle savedInstanceState) {
23 + dashboardViewModel =
24 + ViewModelProviders.of(this).get(DashboardViewModel.class);
25 + View root = inflater.inflate(R.layout.fragment_dashboard, container, false);
26 + final TextView textView = root.findViewById(R.id.text_dashboard);
27 + dashboardViewModel.getText().observe(getViewLifecycleOwner(), new Observer<String>() {
28 + @Override
29 + public void onChanged(@Nullable String s) {
30 + textView.setText(s);
31 + }
32 + });
33 + return root;
34 + }
35 +}
1 +package com.example.whatsup.ui.dashboard;
2 +
3 +import androidx.lifecycle.LiveData;
4 +import androidx.lifecycle.MutableLiveData;
5 +import androidx.lifecycle.ViewModel;
6 +
7 +public class DashboardViewModel extends ViewModel {
8 +
9 + private MutableLiveData<String> mText;
10 +
11 + public DashboardViewModel() {
12 + mText = new MutableLiveData<>();
13 + mText.setValue("This is dashboard fragment");
14 + }
15 +
16 + public LiveData<String> getText() {
17 + return mText;
18 + }
19 +}
...\ No newline at end of file ...\ No newline at end of file
1 +package com.example.whatsup.ui.home;
2 +
3 +import android.os.Bundle;
4 +import android.view.LayoutInflater;
5 +import android.view.View;
6 +import android.view.ViewGroup;
7 +import android.widget.TextView;
8 +
9 +import androidx.annotation.NonNull;
10 +import androidx.annotation.Nullable;
11 +import androidx.fragment.app.Fragment;
12 +import androidx.lifecycle.Observer;
13 +import androidx.lifecycle.ViewModelProviders;
14 +
15 +import com.example.whatsup.R;
16 +
17 +public class HomeFragment extends Fragment {
18 +
19 + private HomeViewModel homeViewModel;
20 +
21 + public View onCreateView(@NonNull LayoutInflater inflater,
22 + ViewGroup container, Bundle savedInstanceState) {
23 + homeViewModel =
24 + ViewModelProviders.of(this).get(HomeViewModel.class);
25 + View root = inflater.inflate(R.layout.fragment_home, container, false);
26 + final TextView textView = root.findViewById(R.id.text_home);
27 + homeViewModel.getText().observe(getViewLifecycleOwner(), new Observer<String>() {
28 + @Override
29 + public void onChanged(@Nullable String s) {
30 + textView.setText(s);
31 + }
32 + });
33 + return root;
34 + }
35 +}
1 +package com.example.whatsup.ui.home;
2 +
3 +import androidx.lifecycle.LiveData;
4 +import androidx.lifecycle.MutableLiveData;
5 +import androidx.lifecycle.ViewModel;
6 +
7 +public class HomeViewModel extends ViewModel {
8 +
9 + private MutableLiveData<String> mText;
10 +
11 + public HomeViewModel() {
12 + mText = new MutableLiveData<>();
13 + mText.setValue("This is home fragment");
14 + }
15 +
16 + public LiveData<String> getText() {
17 + return mText;
18 + }
19 +}
...\ No newline at end of file ...\ No newline at end of file
1 +package com.example.whatsup.ui.notifications;
2 +
3 +import android.os.Bundle;
4 +import android.view.LayoutInflater;
5 +import android.view.View;
6 +import android.view.ViewGroup;
7 +import android.widget.TextView;
8 +
9 +import androidx.annotation.NonNull;
10 +import androidx.annotation.Nullable;
11 +import androidx.fragment.app.Fragment;
12 +import androidx.lifecycle.Observer;
13 +import androidx.lifecycle.ViewModelProviders;
14 +
15 +import com.example.whatsup.R;
16 +
17 +public class NotificationsFragment extends Fragment {
18 +
19 + private NotificationsViewModel notificationsViewModel;
20 +
21 + public View onCreateView(@NonNull LayoutInflater inflater,
22 + ViewGroup container, Bundle savedInstanceState) {
23 + notificationsViewModel =
24 + ViewModelProviders.of(this).get(NotificationsViewModel.class);
25 + View root = inflater.inflate(R.layout.fragment_notifications, container, false);
26 + final TextView textView = root.findViewById(R.id.text_notifications);
27 + notificationsViewModel.getText().observe(getViewLifecycleOwner(), new Observer<String>() {
28 + @Override
29 + public void onChanged(@Nullable String s) {
30 + textView.setText(s);
31 + }
32 + });
33 + return root;
34 + }
35 +}
1 +package com.example.whatsup.ui.notifications;
2 +
3 +import androidx.lifecycle.LiveData;
4 +import androidx.lifecycle.MutableLiveData;
5 +import androidx.lifecycle.ViewModel;
6 +
7 +public class NotificationsViewModel extends ViewModel {
8 +
9 + private MutableLiveData<String> mText;
10 +
11 + public NotificationsViewModel() {
12 + mText = new MutableLiveData<>();
13 + mText.setValue("This is notifications fragment");
14 + }
15 +
16 + public LiveData<String> getText() {
17 + return mText;
18 + }
19 +}
...\ No newline at end of file ...\ No newline at end of file
1 +<vector xmlns:android="http://schemas.android.com/apk/res/android"
2 + xmlns:aapt="http://schemas.android.com/aapt"
3 + android:width="108dp"
4 + android:height="108dp"
5 + android:viewportWidth="108"
6 + android:viewportHeight="108">
7 + <path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
8 + <aapt:attr name="android:fillColor">
9 + <gradient
10 + android:endX="85.84757"
11 + android:endY="92.4963"
12 + android:startX="42.9492"
13 + android:startY="49.59793"
14 + android:type="linear">
15 + <item
16 + android:color="#44000000"
17 + android:offset="0.0" />
18 + <item
19 + android:color="#00000000"
20 + android:offset="1.0" />
21 + </gradient>
22 + </aapt:attr>
23 + </path>
24 + <path
25 + android:fillColor="#FFFFFF"
26 + android:fillType="nonZero"
27 + android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
28 + android:strokeWidth="1"
29 + android:strokeColor="#00000000" />
30 +</vector>
...\ No newline at end of file ...\ No newline at end of file
1 +<vector xmlns:android="http://schemas.android.com/apk/res/android"
2 + android:width="24dp"
3 + android:height="24dp"
4 + android:viewportWidth="24.0"
5 + android:viewportHeight="24.0">
6 + <path
7 + android:fillColor="#FF000000"
8 + android:pathData="M3,13h8L11,3L3,3v10zM3,21h8v-6L3,15v6zM13,21h8L21,11h-8v10zM13,3v6h8L21,3h-8z" />
9 +</vector>
1 +<vector xmlns:android="http://schemas.android.com/apk/res/android"
2 + android:width="24dp"
3 + android:height="24dp"
4 + android:viewportWidth="24.0"
5 + android:viewportHeight="24.0">
6 + <path
7 + android:fillColor="#FF000000"
8 + android:pathData="M10,20v-6h4v6h5v-8h3L12,3 2,12h3v8z" />
9 +</vector>
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<vector xmlns:android="http://schemas.android.com/apk/res/android"
3 + android:width="108dp"
4 + android:height="108dp"
5 + android:viewportWidth="108"
6 + android:viewportHeight="108">
7 + <path
8 + android:fillColor="#3DDC84"
9 + android:pathData="M0,0h108v108h-108z" />
10 + <path
11 + android:fillColor="#00000000"
12 + android:pathData="M9,0L9,108"
13 + android:strokeWidth="0.8"
14 + android:strokeColor="#33FFFFFF" />
15 + <path
16 + android:fillColor="#00000000"
17 + android:pathData="M19,0L19,108"
18 + android:strokeWidth="0.8"
19 + android:strokeColor="#33FFFFFF" />
20 + <path
21 + android:fillColor="#00000000"
22 + android:pathData="M29,0L29,108"
23 + android:strokeWidth="0.8"
24 + android:strokeColor="#33FFFFFF" />
25 + <path
26 + android:fillColor="#00000000"
27 + android:pathData="M39,0L39,108"
28 + android:strokeWidth="0.8"
29 + android:strokeColor="#33FFFFFF" />
30 + <path
31 + android:fillColor="#00000000"
32 + android:pathData="M49,0L49,108"
33 + android:strokeWidth="0.8"
34 + android:strokeColor="#33FFFFFF" />
35 + <path
36 + android:fillColor="#00000000"
37 + android:pathData="M59,0L59,108"
38 + android:strokeWidth="0.8"
39 + android:strokeColor="#33FFFFFF" />
40 + <path
41 + android:fillColor="#00000000"
42 + android:pathData="M69,0L69,108"
43 + android:strokeWidth="0.8"
44 + android:strokeColor="#33FFFFFF" />
45 + <path
46 + android:fillColor="#00000000"
47 + android:pathData="M79,0L79,108"
48 + android:strokeWidth="0.8"
49 + android:strokeColor="#33FFFFFF" />
50 + <path
51 + android:fillColor="#00000000"
52 + android:pathData="M89,0L89,108"
53 + android:strokeWidth="0.8"
54 + android:strokeColor="#33FFFFFF" />
55 + <path
56 + android:fillColor="#00000000"
57 + android:pathData="M99,0L99,108"
58 + android:strokeWidth="0.8"
59 + android:strokeColor="#33FFFFFF" />
60 + <path
61 + android:fillColor="#00000000"
62 + android:pathData="M0,9L108,9"
63 + android:strokeWidth="0.8"
64 + android:strokeColor="#33FFFFFF" />
65 + <path
66 + android:fillColor="#00000000"
67 + android:pathData="M0,19L108,19"
68 + android:strokeWidth="0.8"
69 + android:strokeColor="#33FFFFFF" />
70 + <path
71 + android:fillColor="#00000000"
72 + android:pathData="M0,29L108,29"
73 + android:strokeWidth="0.8"
74 + android:strokeColor="#33FFFFFF" />
75 + <path
76 + android:fillColor="#00000000"
77 + android:pathData="M0,39L108,39"
78 + android:strokeWidth="0.8"
79 + android:strokeColor="#33FFFFFF" />
80 + <path
81 + android:fillColor="#00000000"
82 + android:pathData="M0,49L108,49"
83 + android:strokeWidth="0.8"
84 + android:strokeColor="#33FFFFFF" />
85 + <path
86 + android:fillColor="#00000000"
87 + android:pathData="M0,59L108,59"
88 + android:strokeWidth="0.8"
89 + android:strokeColor="#33FFFFFF" />
90 + <path
91 + android:fillColor="#00000000"
92 + android:pathData="M0,69L108,69"
93 + android:strokeWidth="0.8"
94 + android:strokeColor="#33FFFFFF" />
95 + <path
96 + android:fillColor="#00000000"
97 + android:pathData="M0,79L108,79"
98 + android:strokeWidth="0.8"
99 + android:strokeColor="#33FFFFFF" />
100 + <path
101 + android:fillColor="#00000000"
102 + android:pathData="M0,89L108,89"
103 + android:strokeWidth="0.8"
104 + android:strokeColor="#33FFFFFF" />
105 + <path
106 + android:fillColor="#00000000"
107 + android:pathData="M0,99L108,99"
108 + android:strokeWidth="0.8"
109 + android:strokeColor="#33FFFFFF" />
110 + <path
111 + android:fillColor="#00000000"
112 + android:pathData="M19,29L89,29"
113 + android:strokeWidth="0.8"
114 + android:strokeColor="#33FFFFFF" />
115 + <path
116 + android:fillColor="#00000000"
117 + android:pathData="M19,39L89,39"
118 + android:strokeWidth="0.8"
119 + android:strokeColor="#33FFFFFF" />
120 + <path
121 + android:fillColor="#00000000"
122 + android:pathData="M19,49L89,49"
123 + android:strokeWidth="0.8"
124 + android:strokeColor="#33FFFFFF" />
125 + <path
126 + android:fillColor="#00000000"
127 + android:pathData="M19,59L89,59"
128 + android:strokeWidth="0.8"
129 + android:strokeColor="#33FFFFFF" />
130 + <path
131 + android:fillColor="#00000000"
132 + android:pathData="M19,69L89,69"
133 + android:strokeWidth="0.8"
134 + android:strokeColor="#33FFFFFF" />
135 + <path
136 + android:fillColor="#00000000"
137 + android:pathData="M19,79L89,79"
138 + android:strokeWidth="0.8"
139 + android:strokeColor="#33FFFFFF" />
140 + <path
141 + android:fillColor="#00000000"
142 + android:pathData="M29,19L29,89"
143 + android:strokeWidth="0.8"
144 + android:strokeColor="#33FFFFFF" />
145 + <path
146 + android:fillColor="#00000000"
147 + android:pathData="M39,19L39,89"
148 + android:strokeWidth="0.8"
149 + android:strokeColor="#33FFFFFF" />
150 + <path
151 + android:fillColor="#00000000"
152 + android:pathData="M49,19L49,89"
153 + android:strokeWidth="0.8"
154 + android:strokeColor="#33FFFFFF" />
155 + <path
156 + android:fillColor="#00000000"
157 + android:pathData="M59,19L59,89"
158 + android:strokeWidth="0.8"
159 + android:strokeColor="#33FFFFFF" />
160 + <path
161 + android:fillColor="#00000000"
162 + android:pathData="M69,19L69,89"
163 + android:strokeWidth="0.8"
164 + android:strokeColor="#33FFFFFF" />
165 + <path
166 + android:fillColor="#00000000"
167 + android:pathData="M79,19L79,89"
168 + android:strokeWidth="0.8"
169 + android:strokeColor="#33FFFFFF" />
170 +</vector>
1 +<vector xmlns:android="http://schemas.android.com/apk/res/android"
2 + android:width="24dp"
3 + android:height="24dp"
4 + android:viewportWidth="24.0"
5 + android:viewportHeight="24.0">
6 + <path
7 + android:fillColor="#FF000000"
8 + android:pathData="M12,22c1.1,0 2,-0.9 2,-2h-4c0,1.1 0.89,2 2,2zM18,16v-5c0,-3.07 -1.64,-5.64 -4.5,-6.32L13.5,4c0,-0.83 -0.67,-1.5 -1.5,-1.5s-1.5,0.67 -1.5,1.5v0.68C7.63,5.36 6,7.92 6,11v5l-2,2v1h16v-1l-2,-2z" />
9 +</vector>
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + xmlns:app="http://schemas.android.com/apk/res-auto"
4 + android:id="@+id/container"
5 + android:layout_width="match_parent"
6 + android:layout_height="match_parent"
7 + android:paddingTop="?attr/actionBarSize">
8 + <RelativeLayout
9 + xmlns:android="http://schemas.android.com/apk/res/android"
10 + xmlns:tools="http://schemas.android.com/tools"
11 + android:id="@+id/rl"
12 + android:layout_width="match_parent"
13 + android:layout_height="match_parent"
14 + android:padding="10dp"
15 + tools:context=".MainActivity"
16 + android:background="#c6cabd"
17 + >
        <!-- NOTE(review): use density-independent dp instead of raw px -->
        <EditText android:id="@+id/userId" android:width="220dp"
            android:layout_height="400dp"
            android:layout_width="match_parent" />
        <!-- NOTE(review): app:fabSize / app:tint are FloatingActionButton
             attributes and have no effect on a plain Button — confirm whether
             a FAB was intended here -->
        <Button
            android:id="@+id/button"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            app:backgroundTint="#F57F17"
            android:text="setToken"
            app:fabSize="auto"
            app:tint="@android:color/white" />
29 + </RelativeLayout>
30 +
31 +
32 + <com.google.android.material.bottomnavigation.BottomNavigationView
33 + android:id="@+id/nav_view"
34 + android:layout_width="0dp"
35 + android:layout_height="wrap_content"
36 + android:layout_marginStart="0dp"
37 + android:layout_marginEnd="0dp"
38 + android:background="?android:attr/windowBackground"
39 + app:layout_constraintBottom_toBottomOf="parent"
40 + app:layout_constraintLeft_toLeftOf="parent"
41 + app:layout_constraintRight_toRightOf="parent"
42 + app:menu="@menu/bottom_nav_menu" />
43 +
44 +
45 +
46 + <fragment
47 + android:id="@+id/nav_host_fragment"
48 + android:name="androidx.navigation.fragment.NavHostFragment"
49 + android:layout_width="match_parent"
50 + android:layout_height="match_parent"
51 + app:defaultNavHost="true"
52 + app:layout_constraintBottom_toTopOf="@id/nav_view"
53 + app:layout_constraintLeft_toLeftOf="parent"
54 + app:layout_constraintRight_toRightOf="parent"
55 + app:layout_constraintTop_toTopOf="parent"
56 + app:navGraph="@navigation/mobile_navigation" />
57 +
58 +</androidx.constraintlayout.widget.ConstraintLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + xmlns:app="http://schemas.android.com/apk/res-auto"
4 + xmlns:tools="http://schemas.android.com/tools"
5 + android:layout_width="match_parent"
6 + android:layout_height="match_parent"
7 + tools:context=".ui.dashboard.DashboardFragment">
8 +
9 + <TextView
10 + android:id="@+id/text_dashboard"
11 + android:layout_width="match_parent"
12 + android:layout_height="wrap_content"
13 + android:layout_marginStart="8dp"
14 + android:layout_marginTop="8dp"
15 + android:layout_marginEnd="8dp"
16 + android:textAlignment="center"
17 + android:textSize="20sp"
18 + app:layout_constraintBottom_toBottomOf="parent"
19 + app:layout_constraintEnd_toEndOf="parent"
20 + app:layout_constraintStart_toStartOf="parent"
21 + app:layout_constraintTop_toTopOf="parent" />
22 +</androidx.constraintlayout.widget.ConstraintLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + xmlns:app="http://schemas.android.com/apk/res-auto"
4 + xmlns:tools="http://schemas.android.com/tools"
5 + android:layout_width="match_parent"
6 + android:layout_height="match_parent"
7 + tools:context=".ui.home.HomeFragment">
8 +
9 + <TextView
10 + android:id="@+id/text_home"
11 + android:layout_width="match_parent"
12 + android:layout_height="wrap_content"
13 + android:layout_marginStart="8dp"
14 + android:layout_marginTop="8dp"
15 + android:layout_marginEnd="8dp"
16 + android:textAlignment="center"
17 + android:textSize="20sp"
18 + app:layout_constraintBottom_toBottomOf="parent"
19 + app:layout_constraintEnd_toEndOf="parent"
20 + app:layout_constraintStart_toStartOf="parent"
21 + app:layout_constraintTop_toTopOf="parent" />
22 +</androidx.constraintlayout.widget.ConstraintLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 + xmlns:app="http://schemas.android.com/apk/res-auto"
4 + xmlns:tools="http://schemas.android.com/tools"
5 + android:layout_width="match_parent"
6 + android:layout_height="match_parent"
7 + tools:context=".ui.notifications.NotificationsFragment">
8 +
9 + <TextView
10 + android:id="@+id/text_notifications"
11 + android:layout_width="match_parent"
12 + android:layout_height="wrap_content"
13 + android:layout_marginStart="8dp"
14 + android:layout_marginTop="8dp"
15 + android:layout_marginEnd="8dp"
16 + android:textAlignment="center"
17 + android:textSize="20sp"
18 + app:layout_constraintBottom_toBottomOf="parent"
19 + app:layout_constraintEnd_toEndOf="parent"
20 + app:layout_constraintStart_toStartOf="parent"
21 + app:layout_constraintTop_toTopOf="parent" />
22 +</androidx.constraintlayout.widget.ConstraintLayout>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<menu xmlns:android="http://schemas.android.com/apk/res/android">
3 +
4 + <item
5 + android:id="@+id/navigation_home"
6 + android:icon="@drawable/ic_home_black_24dp"
7 + android:title="@string/title_home" />
8 +
9 + <item
10 + android:id="@+id/navigation_dashboard"
11 + android:icon="@drawable/ic_dashboard_black_24dp"
12 + android:title="@string/title_dashboard" />
13 +
14 + <item
15 + android:id="@+id/navigation_notifications"
16 + android:icon="@drawable/ic_notifications_black_24dp"
17 + android:title="@string/title_notifications" />
18 +
19 +</menu>
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 + <background android:drawable="@drawable/ic_launcher_background" />
4 + <foreground android:drawable="@drawable/ic_launcher_foreground" />
5 +</adaptive-icon>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 + <background android:drawable="@drawable/ic_launcher_background" />
4 + <foreground android:drawable="@drawable/ic_launcher_foreground" />
5 +</adaptive-icon>
...\ No newline at end of file ...\ No newline at end of file
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<navigation xmlns:android="http://schemas.android.com/apk/res/android"
3 + xmlns:app="http://schemas.android.com/apk/res-auto"
4 + xmlns:tools="http://schemas.android.com/tools"
5 + android:id="@+id/mobile_navigation"
6 + app:startDestination="@+id/navigation_home">
7 +
8 + <fragment
9 + android:id="@+id/navigation_home"
10 + android:name="com.example.whatsup.ui.home.HomeFragment"
11 + android:label="@string/title_home"
12 + tools:layout="@layout/fragment_home" />
13 +
14 + <fragment
15 + android:id="@+id/navigation_dashboard"
16 + android:name="com.example.whatsup.ui.dashboard.DashboardFragment"
17 + android:label="@string/title_dashboard"
18 + tools:layout="@layout/fragment_dashboard" />
19 +
20 + <fragment
21 + android:id="@+id/navigation_notifications"
22 + android:name="com.example.whatsup.ui.notifications.NotificationsFragment"
23 + android:label="@string/title_notifications"
24 + tools:layout="@layout/fragment_notifications" />
25 +</navigation>
1 +<?xml version="1.0" encoding="utf-8"?>
2 +<resources>
3 + <color name="colorPrimary">#6200EE</color>
4 + <color name="colorPrimaryDark">#3700B3</color>
5 + <color name="colorAccent">#03DAC5</color>
6 +</resources>
1 +<resources>
2 + <!-- Default screen margins, per the Android Design guidelines. -->
3 + <dimen name="activity_horizontal_margin">16dp</dimen>
4 + <dimen name="activity_vertical_margin">16dp</dimen>
5 +</resources>
1 +<resources>
2 + <string name="app_name">whatsUP</string>
3 + <string name="title_home">Home</string>
4 + <string name="title_dashboard">Dashboard</string>
5 + <string name="title_notifications">Notifications</string>
6 + <string name="default_notification_channel_id" translatable="false">fcm_default_channel</string>
7 + <string name="default_notification_channel_name" translatable="true">Test</string>
8 +</resources>
1 +<resources>
2 +
3 + <!-- Base application theme. -->
4 + <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
5 + <!-- Customize your theme here. -->
6 + <item name="colorPrimary">@color/colorPrimary</item>
7 + <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
8 + <item name="colorAccent">@color/colorAccent</item>
9 + </style>
10 +
11 +</resources>
1 +package com.example.whatsup;
2 +
3 +import org.junit.Test;
4 +
5 +import static org.junit.Assert.*;
6 +
7 +/**
8 + * Example local unit test, which will execute on the development machine (host).
9 + *
10 + * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
11 + */
12 +public class ExampleUnitTest {
13 + @Test
14 + public void addition_isCorrect() {
15 + assertEquals(4, 2 + 2);
16 + }
17 +}
...\ No newline at end of file ...\ No newline at end of file
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {

    repositories {
        google()
        jcenter() // NOTE(review): JCenter is deprecated (read-only since 2021); plan a migration to mavenCentral()

    }
    dependencies {
        // Android Gradle Plugin used to build every module in this project.
        classpath 'com.android.tools.build:gradle:3.6.3'
        classpath 'com.google.gms:google-services:4.2.0' // Google Services plugin

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

// Repositories used to resolve module dependencies (as opposed to build plugins above).
allprojects {
    repositories {
        google()
        jcenter() // NOTE(review): deprecated — migrate to mavenCentral()

    }
}

// `./gradlew clean` — deletes the root build directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
1 +# Project-wide Gradle settings.
2 +# IDE (e.g. Android Studio) users:
3 +# Gradle settings configured through the IDE *will override*
4 +# any settings specified in this file.
5 +# For more details on how to configure your build environment visit
6 +# http://www.gradle.org/docs/current/userguide/build_environment.html
7 +# Specifies the JVM arguments used for the daemon process.
8 +# The setting is particularly useful for tweaking memory settings.
9 +org.gradle.jvmargs=-Xmx1536m
10 +# When configured, Gradle will run in incubating parallel mode.
11 +# This option should only be used with decoupled projects. More details, visit
12 +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 +# org.gradle.parallel=true
14 +# AndroidX package structure to make it clearer which packages are bundled with the
15 +# Android operating system, and which are packaged with your app's APK
16 +# https://developer.android.com/topic/libraries/support-library/androidx-rn
17 +android.useAndroidX=true
18 +# Automatically convert third-party libraries to use AndroidX
19 +android.enableJetifier=true
20 +
1 +#Tue May 12 17:50:08 KST 2020
2 +distributionBase=GRADLE_USER_HOME
3 +distributionPath=wrapper/dists
4 +zipStoreBase=GRADLE_USER_HOME
5 +zipStorePath=wrapper/dists
6 +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
1 +#!/usr/bin/env sh
2 +
3 +##############################################################################
4 +##
5 +## Gradle start up script for UN*X
6 +##
7 +##############################################################################
8 +
9 +# Attempt to set APP_HOME
10 +# Resolve links: $0 may be a link
11 +PRG="$0"
12 +# Need this for relative symlinks.
13 +while [ -h "$PRG" ] ; do
14 + ls=`ls -ld "$PRG"`
15 + link=`expr "$ls" : '.*-> \(.*\)$'`
16 + if expr "$link" : '/.*' > /dev/null; then
17 + PRG="$link"
18 + else
19 + PRG=`dirname "$PRG"`"/$link"
20 + fi
21 +done
22 +SAVED="`pwd`"
23 +cd "`dirname \"$PRG\"`/" >/dev/null
24 +APP_HOME="`pwd -P`"
25 +cd "$SAVED" >/dev/null
26 +
27 +APP_NAME="Gradle"
28 +APP_BASE_NAME=`basename "$0"`
29 +
30 +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 +DEFAULT_JVM_OPTS=""
32 +
33 +# Use the maximum available, or set MAX_FD != -1 to use that value.
34 +MAX_FD="maximum"
35 +
# Print a warning message to standard output; execution continues.
warn () {
    echo "$*"
}
39 +
# Print an error message framed by blank lines, then abort with exit status 1.
die () {
    echo
    echo "$*"
    echo
    exit 1
}
46 +
47 +# OS specific support (must be 'true' or 'false').
48 +cygwin=false
49 +msys=false
50 +darwin=false
51 +nonstop=false
52 +case "`uname`" in
53 + CYGWIN* )
54 + cygwin=true
55 + ;;
56 + Darwin* )
57 + darwin=true
58 + ;;
59 + MINGW* )
60 + msys=true
61 + ;;
62 + NONSTOP* )
63 + nonstop=true
64 + ;;
65 +esac
66 +
67 +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 +
69 +# Determine the Java command to use to start the JVM.
70 +if [ -n "$JAVA_HOME" ] ; then
71 + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 + # IBM's JDK on AIX uses strange locations for the executables
73 + JAVACMD="$JAVA_HOME/jre/sh/java"
74 + else
75 + JAVACMD="$JAVA_HOME/bin/java"
76 + fi
77 + if [ ! -x "$JAVACMD" ] ; then
78 + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 +
80 +Please set the JAVA_HOME variable in your environment to match the
81 +location of your Java installation."
82 + fi
83 +else
84 + JAVACMD="java"
85 + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 +
87 +Please set the JAVA_HOME variable in your environment to match the
88 +location of your Java installation."
89 +fi
90 +
91 +# Increase the maximum file descriptors if we can.
92 +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 + MAX_FD_LIMIT=`ulimit -H -n`
94 + if [ $? -eq 0 ] ; then
95 + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 + MAX_FD="$MAX_FD_LIMIT"
97 + fi
98 + ulimit -n $MAX_FD
99 + if [ $? -ne 0 ] ; then
100 + warn "Could not set maximum file descriptor limit: $MAX_FD"
101 + fi
102 + else
103 + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 + fi
105 +fi
106 +
107 +# For Darwin, add options to specify how the application appears in the dock
108 +if $darwin; then
109 + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 +fi
111 +
112 +# For Cygwin, switch paths to Windows format before running java
113 +if $cygwin ; then
114 + APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 + JAVACMD=`cygpath --unix "$JAVACMD"`
117 +
118 + # We build the pattern for arguments to be converted via cygpath
119 + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 + SEP=""
121 + for dir in $ROOTDIRSRAW ; do
122 + ROOTDIRS="$ROOTDIRS$SEP$dir"
123 + SEP="|"
124 + done
125 + OURCYGPATTERN="(^($ROOTDIRS))"
126 + # Add a user-defined pattern to the cygpath arguments
127 + if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 + fi
130 + # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 + i=0
132 + for arg in "$@" ; do
133 + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 +
136 + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 + else
139 + eval `echo args$i`="\"$arg\""
140 + fi
141 + i=$((i+1))
142 + done
143 + case $i in
144 + (0) set -- ;;
145 + (1) set -- "$args0" ;;
146 + (2) set -- "$args0" "$args1" ;;
147 + (3) set -- "$args0" "$args1" "$args2" ;;
148 + (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 + esac
155 +fi
156 +
157 +# Escape application args
# Single-quote each argument (escaping embedded single quotes) and emit one per
# line with a trailing backslash, so the whole list can later be re-read with
# `eval set -- $APP_ARGS` without word-splitting or glob expansion.
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
162 +APP_ARGS=$(save "$@")
163 +
164 +# Collect all arguments for the java command, following the shell quoting and substitution rules
165 +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 +
167 +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 + cd "$(dirname "$0")"
170 +fi
171 +
172 +exec "$JAVACMD" "$@"
1 +@if "%DEBUG%" == "" @echo off
2 +@rem ##########################################################################
3 +@rem
4 +@rem Gradle startup script for Windows
5 +@rem
6 +@rem ##########################################################################
7 +
8 +@rem Set local scope for the variables with windows NT shell
9 +if "%OS%"=="Windows_NT" setlocal
10 +
11 +set DIRNAME=%~dp0
12 +if "%DIRNAME%" == "" set DIRNAME=.
13 +set APP_BASE_NAME=%~n0
14 +set APP_HOME=%DIRNAME%
15 +
16 +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 +set DEFAULT_JVM_OPTS=
18 +
19 +@rem Find java.exe
20 +if defined JAVA_HOME goto findJavaFromJavaHome
21 +
22 +set JAVA_EXE=java.exe
23 +%JAVA_EXE% -version >NUL 2>&1
24 +if "%ERRORLEVEL%" == "0" goto init
25 +
26 +echo.
27 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 +echo.
29 +echo Please set the JAVA_HOME variable in your environment to match the
30 +echo location of your Java installation.
31 +
32 +goto fail
33 +
34 +:findJavaFromJavaHome
35 +set JAVA_HOME=%JAVA_HOME:"=%
36 +set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 +
38 +if exist "%JAVA_EXE%" goto init
39 +
40 +echo.
41 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 +echo.
43 +echo Please set the JAVA_HOME variable in your environment to match the
44 +echo location of your Java installation.
45 +
46 +goto fail
47 +
48 +:init
49 +@rem Get command-line arguments, handling Windows variants
50 +
51 +if not "%OS%" == "Windows_NT" goto win9xME_args
52 +
53 +:win9xME_args
54 +@rem Slurp the command line arguments.
55 +set CMD_LINE_ARGS=
56 +set _SKIP=2
57 +
58 +:win9xME_args_slurp
59 +if "x%~1" == "x" goto execute
60 +
61 +set CMD_LINE_ARGS=%*
62 +
63 +:execute
64 +@rem Setup the command line
65 +
66 +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 +
68 +@rem Execute Gradle
69 +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 +
71 +:end
72 +@rem End local scope for the variables with windows NT shell
73 +if "%ERRORLEVEL%"=="0" goto mainEnd
74 +
75 +:fail
76 +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 +rem the _cmd.exe /c_ return code!
78 +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 +exit /b 1
80 +
81 +:mainEnd
82 +if "%OS%"=="Windows_NT" endlocal
83 +
84 +:omega
1 +## This file is automatically generated by Android Studio.
2 +# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 +#
4 +# This file should *NOT* be checked into Version Control Systems,
5 +# as it contains information specific to your local configuration.
6 +#
7 +# Location of the SDK. This is only used by Gradle.
8 +# For customization when using a Version Control System, please read the
9 +# header note.
10 +sdk.dir=/Users/gwonjoohee/Library/Android/sdk
...\ No newline at end of file ...\ No newline at end of file
1 +rootProject.name='whatsUP'
2 +include ':app'