王宇洋 3 years ago
parent 927f05a5c1
commit e2a683fdf7

.gitignore vendored (105 lines added)

@@ -0,0 +1,105 @@
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties
# Built application files
*.apk
*.aar
*.ap_
*.aab
# Files for the ART/Dalvik VM
*.dex
# Java class files
*.class
# Generated files
bin/
gen/
out/
# Uncomment the following line in case you need it and you don't have the release build type files in your app
# release/
# Gradle files
.gradle/
build/
# Local configuration file (sdk path, etc)
local.properties
# Proguard folder generated by Eclipse
proguard/
# Log Files
*.log
# Android Studio Navigation editor temp files
.navigation/
# Android Studio captures folder
captures/
# IntelliJ
*.iml
.idea/workspace.xml
.idea/tasks.xml
.idea/gradle.xml
.idea/assetWizardSettings.xml
.idea/dictionaries
.idea/libraries
.idea/jarRepositories.xml
# Android Studio 3 in .gitignore file.
.idea/caches
.idea/modules.xml
# Comment next line if keeping position of elements in Navigation Editor is relevant for you
.idea/navEditor.xml
# Keystore files
# Uncomment the following lines if you do not want to check your keystore files in.
#*.jks
#*.keystore
# External native build folder generated in Android Studio 2.2 and later
.externalNativeBuild
.cxx/
# Google Services (e.g. APIs or Firebase)
# google-services.json
# Freeline
freeline.py
freeline/
freeline_project_description.json
# fastlane
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots
fastlane/test_output
fastlane/readme.md
# Version control
vcs.xml
# lint
lint/intermediates/
lint/generated/
lint/outputs/
lint/tmp/
# lint/reports/
# Android Profiling
*.hprof

.idea/.gitignore vendored (3 lines added)

@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml

@@ -0,0 +1 @@
My Application

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<bytecodeTargetLevel target="16" />
</component>
</project>

@@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DesignSurface">
<option name="filePathToZoomLevelMap">
<map>
<entry key="../../../../layout/custom_preview.xml" value="0.234375" />
<entry key="app/src/main/res/drawable-v24/ic_launcher_foreground.xml" value="0.18717948717948718" />
<entry key="app/src/main/res/drawable/accept.xml" value="0.16538461538461538" />
<entry key="app/src/main/res/drawable/divider.xml" value="0.27564102564102566" />
<entry key="app/src/main/res/drawable/hint_green.xml" value="0.16538461538461538" />
<entry key="app/src/main/res/drawable/hint_red.xml" value="0.16538461538461538" />
<entry key="app/src/main/res/drawable/hint_white.xml" value="0.16538461538461538" />
<entry key="app/src/main/res/drawable/ic_baseline_archive_24.xml" value="0.27564102564102566" />
<entry key="app/src/main/res/drawable/ic_baseline_arrow_back_24.xml" value="0.27564102564102566" />
<entry key="app/src/main/res/drawable/ic_baseline_menu_24.xml" value="0.18717948717948718" />
<entry key="app/src/main/res/drawable/ic_launcher_background.xml" value="0.27564102564102566" />
<entry key="app/src/main/res/drawable/ic_menu_camera.xml" value="0.18717948717948718" />
<entry key="app/src/main/res/drawable/ic_menu_gallery.xml" value="0.18717948717948718" />
<entry key="app/src/main/res/drawable/ic_menu_slideshow.xml" value="0.18717948717948718" />
<entry key="app/src/main/res/drawable/image_upload_button.xml" value="0.14537037037037037" />
<entry key="app/src/main/res/drawable/reject.xml" value="0.27564102564102566" />
<entry key="app/src/main/res/drawable/side_nav_bar.xml" value="0.28076923076923077" />
<entry key="app/src/main/res/layout/activity_main.xml" value="0.2441123188405797" />
<entry key="app/src/main/res/layout/activity_scan.xml" value="0.15213675213675212" />
<entry key="app/src/main/res/layout/app_bar_main.xml" value="0.22604166666666667" />
<entry key="app/src/main/res/layout/content_main.xml" value="0.26458333333333334" />
<entry key="app/src/main/res/layout/fragment_clarity_enhancement.xml" value="0.3052536231884058" />
<entry key="app/src/main/res/layout/fragment_gallery.xml" value="0.2441123188405797" />
<entry key="app/src/main/res/layout/fragment_home.xml" value="0.25" />
<entry key="app/src/main/res/layout/fragment_image_colorize.xml" value="0.3079710144927536" />
<entry key="app/src/main/res/layout/fragment_slideshow.xml" value="0.2441123188405797" />
<entry key="app/src/main/res/layout/image_colorize_upload_activity.xml" value="0.25" />
<entry key="app/src/main/res/layout/index_view.xml" value="0.18318627850730212" />
<entry key="app/src/main/res/layout/nav_header_main.xml" value="0.2441123188405797" />
<entry key="app/src/main/res/layout/new_content_main.xml" value="0.2293651113804904" />
<entry key="app/src/main/res/menu/activity_main_drawer.xml" value="0.22604166666666667" />
<entry key="app/src/main/res/menu/main.xml" value="0.26458333333333334" />
<entry key="app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml" value="0.2841880341880342" />
<entry key="app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml" value="0.2841880341880342" />
</map>
</option>
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_X" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" />
</component>
<component name="ProjectType">
<option name="id" value="Android" />
</component>
</project>

[Binary IDE analysis caches (SonarLint issue stores): several small binary files whose readable payload is a list of SonarQube/SonarLint rule findings for the project's Java and Kotlin sources, together with text offsets that do not render as text. Representative findings:
java:S1118 "Add a private constructor to hide the implicit public one."
java:S120 "Rename this package name to match the regular expression '^[a-z_]+(\.[a-z_][a-z0-9_]*)*$'."
java:S115 / S116 / S117 / S100 "Rename this constant / field / local variable / method name to match the regular expression ..."
java:S1186 "Add a nested comment explaining why this method is empty, throw an UnsupportedOperationException or complete the implementation."
java:S2293 "Replace the type specification in this constructor call with the diamond operator ("<>")."
java:S1604 "Make this anonymous inner class a lambda"
java:S1874 "Remove this use of "ProgressDialog" / "insertImage" / "getDefaultDisplay" / "FLAG_FULLSCREEN" / "startActivityForResult"; it is deprecated."
java:S3252 "Use static access with "com.google.android.material.snackbar.BaseTransientBottomBar" for "LENGTH_SHORT"."
java:S1128 "Remove this unused import ..." (android.app.Activity, androidx.navigation.NavController, com.example.myapplication.databinding.ActivityMainBinding, and others)
java:S1192 "Define a constant instead of duplicating this literal "image" 6 times."
java:S2168 "Remove this dangerous instance of double-checked locking."
java:S2259 "A "NullPointerException" could be thrown; "data" is nullable here."
java:S2184 "Cast one of the operands of this addition operation to a "float"."
kotlin:S125 "Remove this commented out code."
kotlin:S3776 "Refactor this method to reduce its Cognitive Complexity from 18 / 19 / 24 to the 15 allowed."]

@@ -0,0 +1,97 @@
[SonarLint issue-cache index (binary): maps each analysed source file to its cache entry.]
app/src/main/java/com/example/myapplication/Interfaces/RequestsListener.java,b/6/b64e0a299dbcf79634197da0f109bd3a0cedca1a
app/src/main/java/com/example/myapplication/BaiduAPI/styleTransTypes.java,e/7/e71d1eeecdf835abc827ca32af95b1c69e8ca280
app/src/main/java/com/example/myapplication/ui/ClarityEnhancement/ClarityEnhancementViewModel.java,c/7/c7f2656785aee3a27def12ee509c33a8cc3fb001
app/src/main/java/com/example/myapplication/ImageUtil/ImageLoaderUtils.java,2/7/27131eceab8a4ea5bf0952a2f198d34125ef409f
app/src/main/java/com/example/myapplication/ImageUtil/PhotoLib.java,c/2/c2391eec4aba3014e82d6abf11143155b4ca4ec3
app/src/main/java/com/example/myapplication/ImageUtil/GlideEngine.java,d/a/dafa6eb4e21acf53d2024f3cc784f3dee7136b41
app/src/main/java/com/example/myapplication/ui/ClarityEnhancement/ClarityEnhancementFragment.java,a/6/a60dbd729d9076d954fd3aa53e0c80146f2740c9
app/src/main/java/com/example/myapplication/ui/others/ResizableImageView.java,a/c/ac8381edf7474e59deab20c7c966533ffee150c0
app/src/main/java/com/example/myapplication/ui/ImageColorize/ImageColorizeViewModel.java,f/8/f8ff55332c4a1e6be14e6ec8d11db039ec4aa823
app/src/main/java/com/example/myapplication/ui/ImageColorize/ImageColorizeFragment.java,5/a/5a52c33ffd5683cff49e75096c9d771abeee572c
app/src/main/java/com/example/myapplication/ui/image_upload/ImageUploadActivityView.java,b/b/bb5e6b5d75cc22c72476c23c9bc363443e697521
app/src/main/java/com/example/myapplication/ui/image_upload/ImageUploadActivityViewModel.java,c/9/c9d4a9d00d4686c44711354d2cafa2cfebe1edae
app/src/main/java/com/example/myapplication/BaiduAPI/BaiduImageAPI.java,3/7/37061ddf9e78d1e57372c6e196bc7d8775492686
local.properties,0/7/0712df971a99ac4d2fccb8e0fb19f377f3374cca
gradle.properties,2/a/2afbb999f001938c88fa43fc2ef52abf0f8213e4
gradle/wrapper/gradle-wrapper.properties,f/b/fbe448ebfc3eb2d4e308f6b8b043666f5b57235e
app/proguard-rules.pro,9/e/9e08934d811afe28fbc77aaa3c0d747b94348db9
settings.gradle,0/5/05efc8b1657769a27696d478ded1e95f38737233
lib/src/main/java/com/example/lib/MyClass.kt,4/c/4c38ac75e1f6fe535d5e10d88743a95943ff2ebc
lib/src/main/java/info/hannes/liveedgedetection/PolygonPoints.kt,e/f/efe9c47951c71a6e800c12a630112a62300c63e7
lib/src/main/java/info/hannes/liveedgedetection/IScanner.kt,f/2/f2bc4af40b8605453a29b2bb575e64d10e8ee6f4
myapplication/src/main/AndroidManifest.xml,0/b/0bbca1fce58c1574d151410dedc43bd8845e570a
myapplication/src/main/java/info/hannes/liveedgedetection/IScanner.kt,2/0/200ce018dd283f7e24213be1a9b45836adcded59
myapplication/src/main/java/info/hannes/liveedgedetection/ImageDetectionProperties.kt,1/3/139d6f77a700c717c531cd2a91a203e046e4580f
myapplication/build.gradle,8/7/8757dd6bc03199c6ed861274bfd7005e95c00614
build.gradle,f/0/f07866736216be0ee2aba49e392191aeae700a35
app/build.gradle,f/4/f4a01d6a4fcb971362ec00a83903fd3902f52164
app/src/main/AndroidManifest.xml,8/c/8c55c3ccc257e5907959013f99656e4c8ec3903e
app/src/main/java/com/example/myapplication/liveedgedetection/utils/CameraUtils.kt,7/c/7ce362e392c7d30a2cea8de7a03e5f3f4c031ed9
app/src/main/java/com/example/myapplication/liveedgedetection/utils/BitmapUtils.kt,1/b/1b33e08024daf5453641a3d53e46e2d5f6b8aa08
app/src/main/java/com/example/myapplication/liveedgedetection/utils/ContextUtils.kt,a/e/ae19c1e1dfac9dbf158d6a53a27617bf1f057f5e
app/src/main/java/com/example/myapplication/liveedgedetection/utils/FileUtils.kt,3/b/3bcd8948f9b3e5414c803e71417afd3a3b60ac12
app/src/main/java/com/example/myapplication/liveedgedetection/view/Quadrilateral.kt,f/8/f896eef244dde91f95e9b488db64b7fb2ce6f771
app/src/main/java/com/example/myapplication/ui/BaseFragment.java,8/b/8b78630e7dde3f049705d7cd4979b10ac6aa09b1
app/src/main/java/com/example/myapplication/MainActivity.java,9/7/97d5d04e52812b9d0ac3b157695041e5f8fbc36a
app/src/main/java/com/example/myapplication/ui/BaseActivity.java,5/d/5d6aa65084468f60149784678eabd2796115f29c
app/src/main/java/com/example/myapplication/ui/slideshow/SlideshowViewModel.java,7/1/71c8a3b40ad8217cf0741be9a74959fe1b5f8447
app/src/main/java/com/example/myapplication/ui/slideshow/SlideshowFragment.java,8/b/8bed39f31be1618244149e1411c8434adc6b2c4f
app/src/main/java/com/example/myapplication/liveedgedetection/view/PolygonView.kt,c/b/cb2f25fc3f584805f0393a760bedd03d6b81a469
app/src/main/java/com/example/myapplication/liveedgedetection/ScanHint.kt,2/b/2b31c6718b25f4d68b835ab56e279670da563e5c
app/src/main/java/com/example/myapplication/liveedgedetection/ScanConstants.kt,8/d/8d536770321d608dff037b85fc533c9d1dffcba3
app/src/main/java/com/example/myapplication/liveedgedetection/PolygonPoints.kt,6/5/650cf1d4607eb972ef8aae9c988291baedfff89a
app/src/main/java/com/example/myapplication/liveedgedetection/IScanner.kt,b/6/b6098a44f7e9626d7b22242f004d2fa31cd6f812
app/src/main/java/com/example/myapplication/liveedgedetection/ImageDetectionProperties.kt,7/6/76b7c8cb6c290aa7f989d9be3a87dfc08a3d2106
app/src/main/java/com/example/myapplication/liveedgedetection/utils/ScanUtils.kt,9/e/9e3ced67986cedf260ba75012a89dd6c0251eae8
app/src/main/java/com/example/myapplication/liveedgedetection/view/ScanCanvasView.kt,a/a/aaf2b106b5061f14884a4c25a5bffda3491cef92
app/src/main/java/com/example/myapplication/liveedgedetection/view/ScanSurfaceView.kt,4/8/4854119de32242a488335cbb0e07fdc8d6fa9416
app/src/main/java/com/example/myapplication/liveedgedetection/activity/ScanActivity.kt,c/e/ce349a55d80faca58f91f08df888c62e9cb55453

app/.gitignore vendored (1 line added)

@@ -0,0 +1 @@
/build

@@ -0,0 +1,65 @@
plugins {
id 'com.android.application'
id 'kotlin-android'
id 'kotlin-android-extensions'
}
android {
compileSdk 30
defaultConfig {
applicationId "com.example.myapplication"
minSdk 21
targetSdk 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
buildFeatures {
viewBinding true
}
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.3.1'
implementation 'com.google.android.material:material:1.4.0'
implementation 'androidx.constraintlayout:constraintlayout:2.1.0'
implementation 'androidx.lifecycle:lifecycle-livedata-ktx:2.3.1'
implementation 'androidx.lifecycle:lifecycle-viewmodel-ktx:2.3.1'
implementation 'androidx.navigation:navigation-fragment:2.3.5'
implementation 'androidx.navigation:navigation-ui:2.3.5'
implementation 'com.android.volley:volley:1.2.0'
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
implementation group: 'com.baidu.aip', name: 'java-sdk', version: '4.15.0'
implementation 'cn.hutool:hutool-all:5.7.10'
implementation 'io.github.lucksiege:pictureselector:v2.7.3-rc07'
implementation "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava"
implementation 'com.github.bumptech.glide:glide:4.9.0'
annotationProcessor 'com.github.bumptech.glide:compiler:4.9.0'
implementation 'com.github.ihimanshurawat:ImageWorker:1.2.0'
implementation 'com.github.karacce:Buttom:v1.0'
implementation 'com.sdsmdg.harjot:rotatingtext:1.0.2'
implementation 'ch.halcyon:squareprogressbar:1.6.4'
implementation 'com.github.chrisbanes:PhotoView:2.0.0'
implementation 'com.quickbirdstudios:opencv:4.5.2'
implementation 'com.jakewharton.timber:timber:5.0.1'
implementation 'com.itextpdf:itext7-core:7.1.8'
}
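The module enables view binding (buildFeatures { viewBinding true } above), so the Android Gradle plugin generates one binding class per layout; the SonarLint caches earlier in this commit reference com.example.myapplication.databinding.ActivityMainBinding. Below is a minimal sketch of how such a binding is typically used, assuming a layout named activity_main.xml; the class is illustrative and not part of this commit:

    import android.os.Bundle;
    import androidx.appcompat.app.AppCompatActivity;
    import com.example.myapplication.databinding.ActivityMainBinding;

    public class MainActivitySketch extends AppCompatActivity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            // Inflate the generated binding instead of calling setContentView(R.layout.activity_main)
            ActivityMainBinding binding = ActivityMainBinding.inflate(getLayoutInflater());
            setContentView(binding.getRoot());
            // Child views are then reachable as typed fields on the binding object
        }
    }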

@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="testRunner" value="GRADLE" />
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="resolveModulePerSourceSet" value="false" />
</GradleProjectSettings>
</option>
</component>
</project>

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" />
</component>
<component name="ProjectType">
<option name="id" value="Android" />
</component>
</project>

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/modules/openCVLibrary.iml" filepath="$PROJECT_DIR$/.idea/modules/openCVLibrary.iml" />
</modules>
</component>
</project>

@@ -0,0 +1,17 @@
apply plugin: 'com.android.library'
android {
compileSdkVersion 27
defaultConfig {
minSdkVersion 15
targetSdkVersion 27
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
}
}

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<lint>
<issue id="InlinedApi">
<ignore path="src\org\opencv\android\JavaCameraView.java" />
</issue>
<issue id="NewApi">
<ignore path="src\org\opencv\android\JavaCameraView.java" />
</issue>
</lint>

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv"
android:versionCode="3410"
android:versionName="3.4.1">
</manifest>

@@ -0,0 +1,33 @@
package org.opencv.engine;
/**
* This class provides a Java interface to the OpenCV Engine Service. It is synchronous with the native OpenCVEngine class.
*/
interface OpenCVEngineInterface
{
/**
* @return Returns service version.
*/
int getEngineVersion();
/**
* Finds an installed OpenCV library.
* @param version OpenCV version.
* @return Path to the OpenCV native libs, or an empty string if OpenCV cannot be found.
*/
String getLibPathByVersion(String version);
/**
* Tries to install the defined version of OpenCV from the Google Play Market.
* @param version OpenCV version.
* @return True if installation was successful or the OpenCV package has already been installed.
*/
boolean installVersion(String version);
/**
* Returns the list of libraries in loading order, separated by semicolons.
* @param version OpenCV version.
* @return Names of the OpenCV libraries, separated by semicolons.
*/
String getLibraryList(String version);
}
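This AIDL file defines the interface the OpenCV Manager service exposes to client apps. Below is a minimal sketch of the call sequence a bound client would perform, assuming it has already obtained an OpenCVEngineInterface instance; AsyncServiceHelper in the next file does the same with full error handling, and the class and method names below are illustrative:

    import android.os.RemoteException;
    import java.io.File;
    import org.opencv.engine.OpenCVEngineInterface;

    class EngineClientSketch {
        // Resolve the native-library directory and load the libraries in the order the engine reports.
        static void loadFromEngine(OpenCVEngineInterface engine, String version) throws RemoteException {
            String path = engine.getLibPathByVersion(version); // directory holding the native libs
            String libs = engine.getLibraryList(version);      // load order, separated by semicolons
            for (String name : libs.split(";")) {
                System.load(path + File.separator + name);     // load each native library in turn
            }
        }
    }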

@@ -0,0 +1,390 @@
package org.opencv.android;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.net.Uri;
import android.os.IBinder;
import android.os.RemoteException;
import android.util.Log;
import org.opencv.core.Core;
import org.opencv.engine.OpenCVEngineInterface;
import java.io.File;
import java.util.StringTokenizer;
class AsyncServiceHelper
{
public static boolean initOpenCV(String Version, final Context AppContext,
final LoaderCallbackInterface Callback)
{
AsyncServiceHelper helper = new AsyncServiceHelper(Version, AppContext, Callback);
Intent intent = new Intent("org.opencv.engine.BIND");
intent.setPackage("org.opencv.engine");
if (AppContext.bindService(intent, helper.mServiceConnection, Context.BIND_AUTO_CREATE))
{
return true;
}
else
{
AppContext.unbindService(helper.mServiceConnection);
InstallService(AppContext, Callback);
return false;
}
}
protected AsyncServiceHelper(String Version, Context AppContext, LoaderCallbackInterface Callback)
{
mOpenCVersion = Version;
mUserAppCallback = Callback;
mAppContext = AppContext;
}
protected static final String TAG = "OpenCVManager/Helper";
protected static final int MINIMUM_ENGINE_VERSION = 2;
protected OpenCVEngineInterface mEngineService;
protected LoaderCallbackInterface mUserAppCallback;
protected String mOpenCVersion;
protected Context mAppContext;
protected static boolean mServiceInstallationProgress = false;
protected static boolean mLibraryInstallationProgress = false;
protected static boolean InstallServiceQuiet(Context context)
{
boolean result = true;
try
{
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(OPEN_CV_SERVICE_URL));
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
catch(Exception e)
{
result = false;
}
return result;
}
protected static void InstallService(final Context AppContext, final LoaderCallbackInterface Callback)
{
if (!mServiceInstallationProgress)
{
Log.d(TAG, "Request new service installation");
InstallCallbackInterface InstallQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
{
return "OpenCV Manager";
}
public void install() {
Log.d(TAG, "Trying to install OpenCV Manager via Google Play");
boolean result = InstallServiceQuiet(AppContext);
if (result)
{
mServiceInstallationProgress = true;
Log.d(TAG, "Package installation started");
}
else
{
Log.d(TAG, "OpenCV package was not installed!");
int Status = LoaderCallbackInterface.MARKET_ERROR;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Unbind from service");
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
}
public void cancel()
{
Log.d(TAG, "OpenCV library installation was canceled");
int Status = LoaderCallbackInterface.INSTALL_CANCELED;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
public void wait_install()
{
Log.e(TAG, "Installation was not started! Nothing to wait!");
}
};
Callback.onPackageInstall(InstallCallbackInterface.NEW_INSTALLATION, InstallQuery);
}
else
{
Log.d(TAG, "Waiting current installation process");
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
{
return "OpenCV Manager";
}
public void install()
{
Log.e(TAG, "Nothing to install we just wait current installation");
}
public void cancel()
{
Log.d(TAG, "Waiting for OpenCV canceled by user");
mServiceInstallationProgress = false;
int Status = LoaderCallbackInterface.INSTALL_CANCELED;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
public void wait_install()
{
InstallServiceQuiet(AppContext);
}
};
Callback.onPackageInstall(InstallCallbackInterface.INSTALLATION_PROGRESS, WaitQuery);
}
}
/**
* URL of OpenCV Manager page on Google Play Market.
*/
protected static final String OPEN_CV_SERVICE_URL = "market://details?id=org.opencv.engine";
protected ServiceConnection mServiceConnection = new ServiceConnection()
{
public void onServiceConnected(ComponentName className, IBinder service)
{
Log.d(TAG, "Service connection created");
mEngineService = OpenCVEngineInterface.Stub.asInterface(service);
if (null == mEngineService)
{
Log.d(TAG, "OpenCV Manager Service connection fails. May be service was not installed?");
InstallService(mAppContext, mUserAppCallback);
}
else
{
mServiceInstallationProgress = false;
try
{
if (mEngineService.getEngineVersion() < MINIMUM_ENGINE_VERSION)
{
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION);
return;
}
Log.d(TAG, "Trying to get library path");
String path = mEngineService.getLibPathByVersion(mOpenCVersion);
if ((null == path) || (path.length() == 0))
{
if (!mLibraryInstallationProgress)
{
InstallCallbackInterface InstallQuery = new InstallCallbackInterface() {
public String getPackageName()
{
return "OpenCV library";
}
public void install() {
Log.d(TAG, "Trying to install OpenCV lib via Google Play");
try
{
if (mEngineService.installVersion(mOpenCVersion))
{
mLibraryInstallationProgress = true;
Log.d(TAG, "Package installation started");
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
}
else
{
Log.d(TAG, "OpenCV package was not installed!");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.MARKET_ERROR);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.MARKET_ERROR);
}
} catch (RemoteException e) {
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
public void cancel() {
Log.d(TAG, "OpenCV library installation was canceled");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INSTALL_CANCELED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INSTALL_CANCELED);
}
public void wait_install() {
Log.e(TAG, "Installation was not started! Nothing to wait!");
}
};
mUserAppCallback.onPackageInstall(InstallCallbackInterface.NEW_INSTALLATION, InstallQuery);
}
else
{
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
public String getPackageName()
{
return "OpenCV library";
}
public void install() {
Log.e(TAG, "Nothing to install we just wait current installation");
}
public void cancel()
{
Log.d(TAG, "OpenCV library installation was canceled");
mLibraryInstallationProgress = false;
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INSTALL_CANCELED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INSTALL_CANCELED);
}
public void wait_install() {
Log.d(TAG, "Waiting for current installation");
try
{
if (!mEngineService.installVersion(mOpenCVersion))
{
Log.d(TAG, "OpenCV package was not installed!");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.MARKET_ERROR);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.MARKET_ERROR);
}
else
{
Log.d(TAG, "Wating for package installation");
}
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
} catch (RemoteException e) {
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
};
mUserAppCallback.onPackageInstall(InstallCallbackInterface.INSTALLATION_PROGRESS, WaitQuery);
}
return;
}
else
{
Log.d(TAG, "Trying to get library list");
mLibraryInstallationProgress = false;
String libs = mEngineService.getLibraryList(mOpenCVersion);
Log.d(TAG, "Library list: \"" + libs + "\"");
Log.d(TAG, "First attempt to load libs");
int status;
if (initOpenCVLibs(path, libs))
{
Log.d(TAG, "First attempt to load libs is OK");
String eol = System.getProperty("line.separator");
for (String str : Core.getBuildInformation().split(eol))
Log.i(TAG, str);
status = LoaderCallbackInterface.SUCCESS;
}
else
{
Log.d(TAG, "First attempt to load libs fails");
status = LoaderCallbackInterface.INIT_FAILED;
}
Log.d(TAG, "Init finished with status " + status);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(status);
}
}
catch (RemoteException e)
{
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
}
public void onServiceDisconnected(ComponentName className)
{
mEngineService = null;
}
};
private boolean loadLibrary(String AbsPath)
{
boolean result = true;
Log.d(TAG, "Trying to load library " + AbsPath);
try
{
System.load(AbsPath);
Log.d(TAG, "OpenCV libs init was ok!");
}
catch(UnsatisfiedLinkError e)
{
Log.d(TAG, "Cannot load library \"" + AbsPath + "\"");
e.printStackTrace();
result = false;
}
return result;
}
private boolean initOpenCVLibs(String Path, String Libs)
{
Log.d(TAG, "Trying to init OpenCV libs");
if ((null != Path) && (Path.length() != 0))
{
boolean result = true;
if ((null != Libs) && (Libs.length() != 0))
{
Log.d(TAG, "Trying to load libs by dependency list");
StringTokenizer splitter = new StringTokenizer(Libs, ";");
while(splitter.hasMoreTokens())
{
String AbsLibraryPath = Path + File.separator + splitter.nextToken();
result &= loadLibrary(AbsLibraryPath);
}
}
else
{
// If the dependencies list is not defined or empty.
String AbsLibraryPath = Path + File.separator + "libopencv_java3.so";
result = loadLibrary(AbsLibraryPath);
}
return result;
}
else
{
Log.d(TAG, "Library path \"" + Path + "\" is empty");
return false;
}
}
}
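AsyncServiceHelper is package-private and is normally driven through the SDK facade OpenCVLoader, which is not part of this diff; treating that facade and its constants as assumptions, the usual app-side entry point looks roughly like this (mLoaderCallback would be a BaseLoaderCallback subclass, sketched after the next file):

    import android.app.Activity;
    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.LoaderCallbackInterface;
    import org.opencv.android.OpenCVLoader;

    public class OpenCvInitSketch extends Activity {
        private BaseLoaderCallback mLoaderCallback; // assumed to be initialized elsewhere in the activity

        @Override
        protected void onResume() {
            super.onResume();
            if (OpenCVLoader.initDebug()) {
                // Native libs are bundled with the APK; report success straight away.
                mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
            } else {
                // Fall back to OpenCV Manager; this path ends up in AsyncServiceHelper.initOpenCV() above.
                // OPENCV_VERSION_3_4_0 is assumed to be the SDK constant for the "3.4.0" version string.
                OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0, this, mLoaderCallback);
            }
        }
    }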

@@ -0,0 +1,141 @@
package org.opencv.android;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.util.Log;
/**
* Basic implementation of LoaderCallbackInterface.
*/
public abstract class BaseLoaderCallback implements LoaderCallbackInterface {
public BaseLoaderCallback(Context AppContext) {
mAppContext = AppContext;
}
public void onManagerConnected(int status)
{
switch (status)
{
/** OpenCV initialization was successful. **/
case LoaderCallbackInterface.SUCCESS:
{
/** Application must override this method to handle successful library initialization. **/
} break;
/** OpenCV loader can not start Google Play Market. **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.e(TAG, "Package installation failed!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Package installation failed!");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
} break;
/** Package installation has been canceled. **/
case LoaderCallbackInterface.INSTALL_CANCELED:
{
Log.d(TAG, "OpenCV library installation was canceled by user");
finish();
} break;
/** Application is incompatible with this version of OpenCV Manager. Possibly, a service update is required. **/
case LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION:
{
Log.d(TAG, "OpenCV Manager Service is uncompatible with this app!");
AlertDialog IncomatibilityMessage = new AlertDialog.Builder(mAppContext).create();
IncomatibilityMessage.setTitle("OpenCV Manager");
IncomatibilityMessage.setMessage("OpenCV Manager service is incompatible with this app. Try to update it via Google Play.");
IncomatibilityMessage.setCancelable(false); // This blocks the 'BACK' button
IncomatibilityMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
IncompatibilityMessage.show();
} break;
/** Other status, i.e. INIT_FAILED. **/
default:
{
Log.e(TAG, "OpenCV loading failed!");
AlertDialog InitFailedDialog = new AlertDialog.Builder(mAppContext).create();
InitFailedDialog.setTitle("OpenCV error");
InitFailedDialog.setMessage("OpenCV was not initialised correctly. Application will be shut down");
InitFailedDialog.setCancelable(false); // This blocks the 'BACK' button
InitFailedDialog.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
InitFailedDialog.show();
} break;
}
}
public void onPackageInstall(final int operation, final InstallCallbackInterface callback)
{
switch (operation)
{
case InstallCallbackInterface.NEW_INSTALLATION:
{
AlertDialog InstallMessage = new AlertDialog.Builder(mAppContext).create();
InstallMessage.setTitle("Package not found");
InstallMessage.setMessage(callback.getPackageName() + " package was not found! Try to install it?");
InstallMessage.setCancelable(false); // This blocks the 'BACK' button
InstallMessage.setButton(AlertDialog.BUTTON_POSITIVE, "Yes", new OnClickListener()
{
public void onClick(DialogInterface dialog, int which)
{
callback.install();
}
});
InstallMessage.setButton(AlertDialog.BUTTON_NEGATIVE, "No", new OnClickListener() {
public void onClick(DialogInterface dialog, int which)
{
callback.cancel();
}
});
InstallMessage.show();
} break;
case InstallCallbackInterface.INSTALLATION_PROGRESS:
{
AlertDialog WaitMessage = new AlertDialog.Builder(mAppContext).create();
WaitMessage.setTitle("OpenCV is not ready");
WaitMessage.setMessage("Installation is in progress. Wait or exit?");
WaitMessage.setCancelable(false); // This blocks the 'BACK' button
WaitMessage.setButton(AlertDialog.BUTTON_POSITIVE, "Wait", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
callback.wait_install();
}
});
WaitMessage.setButton(AlertDialog.BUTTON_NEGATIVE, "Exit", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
callback.cancel();
}
});
WaitMessage.show();
} break;
}
}
void finish()
{
((Activity) mAppContext).finish();
}
protected Context mAppContext;
private final static String TAG = "OpenCVLoader/BaseLoaderCallback";
}
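In practice an Activity supplies a small subclass of BaseLoaderCallback that handles SUCCESS itself and defers everything else to the dialogs implemented above. A sketch under the assumption that the activity owns a CameraBridgeViewBase (defined later in this commit) and that its enableView() method starts frame delivery; the class and field names below are illustrative:

    import android.app.Activity;
    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.CameraBridgeViewBase;
    import org.opencv.android.LoaderCallbackInterface;

    public class LoaderCallbackSketch extends Activity {
        private CameraBridgeViewBase mOpenCvCameraView; // assumed to be bound to a layout elsewhere

        private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
            @Override
            public void onManagerConnected(int status) {
                if (status == LoaderCallbackInterface.SUCCESS) {
                    // Native libraries are loaded; OpenCV classes are now safe to use.
                    mOpenCvCameraView.enableView(); // enableView() assumed from the camera-view API
                } else {
                    // Anything else falls through to the error dialogs implemented above.
                    super.onManagerConnected(status);
                }
            }
        };
    }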

@@ -0,0 +1,298 @@
package org.opencv.android;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.*;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import java.util.Arrays;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
@TargetApi(21)
public class Camera2Renderer extends CameraGLRendererBase {
protected final String LOGTAG = "Camera2Renderer";
private CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private String mCameraID;
private Size mPreviewSize = new Size(-1, -1);
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
Camera2Renderer(CameraGLSurfaceView view) {
super(view);
}
@Override
protected void doStart() {
Log.d(LOGTAG, "doStart");
startBackgroundThread();
super.doStart();
}
@Override
protected void doStop() {
Log.d(LOGTAG, "doStop");
super.doStop();
stopBackgroundThread();
}
boolean cacPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "cacPreviewSize: "+width+"x"+height);
if(mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
CameraManager manager = (CameraManager) mView.getContext()
.getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager
.getCameraCharacteristics(mCameraID);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int bestWidth = 0, bestHeight = 0;
float aspect = (float)width / height;
for (Size psize : map.getOutputSizes(SurfaceTexture.class)) {
int w = psize.getWidth(), h = psize.getHeight();
Log.d(LOGTAG, "trying size: "+w+"x"+h);
if ( width >= w && height >= h &&
bestWidth <= w && bestHeight <= h &&
Math.abs(aspect - (float)w/h) < 0.2 ) {
bestWidth = w;
bestHeight = h;
}
}
Log.i(LOGTAG, "best size: "+bestWidth+"x"+bestHeight);
if( bestWidth == 0 || bestHeight == 0 ||
mPreviewSize.getWidth() == bestWidth &&
mPreviewSize.getHeight() == bestHeight )
return false;
else {
mPreviewSize = new Size(bestWidth, bestHeight);
return true;
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "cacPreviewSize - Camera Access Exception");
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "cacPreviewSize - Illegal Argument Exception");
} catch (SecurityException e) {
Log.e(LOGTAG, "cacPreviewSize - Security Exception");
}
return false;
}
@Override
protected void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
CameraManager manager = (CameraManager) mView.getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String[] camList = manager.getCameraIdList();
if(camList.length == 0) {
Log.e(LOGTAG, "Error: camera isn't detected.");
return;
}
if(id == CameraBridgeViewBase.CAMERA_ID_ANY) {
mCameraID = camList[0];
} else {
for (String cameraID : camList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
if( id == CameraBridgeViewBase.CAMERA_ID_BACK &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK ||
id == CameraBridgeViewBase.CAMERA_ID_FRONT &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
mCameraID = cameraID;
break;
}
}
}
if(mCameraID != null) {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException(
"Time out waiting to lock camera opening.");
}
Log.i(LOGTAG, "Opening camera: " + mCameraID);
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "OpenCamera - Camera Access Exception");
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception");
} catch (SecurityException e) {
Log.e(LOGTAG, "OpenCamera - Security Exception");
} catch (InterruptedException e) {
Log.e(LOGTAG, "OpenCamera - Interrupted Exception");
}
}
@Override
protected void closeCamera() {
Log.i(LOGTAG, "closeCamera");
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
mCameraOpenCloseLock.release();
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
mCameraOpenCloseLock.release();
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
mCameraOpenCloseLock.release();
}
};
private void createCameraPreviewSession() {
int w=mPreviewSize.getWidth(), h=mPreviewSize.getHeight();
Log.i(LOGTAG, "createCameraPreviewSession("+w+"x"+h+")");
if(w<0 || h<0)
return;
try {
mCameraOpenCloseLock.acquire();
if (null == mCameraDevice) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
return;
}
if (null != mCaptureSession) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
return;
}
if(null == mSTexture) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: preview SurfaceTexture is null");
return;
}
mSTexture.setDefaultBufferSize(w, h);
Surface surface = new Surface(mSTexture);
mPreviewRequestBuilder = mCameraDevice
.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured( CameraCaptureSession cameraCaptureSession) {
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
Log.i(LOGTAG, "CameraPreviewSession has been started");
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCaptureSession failed");
}
mCameraOpenCloseLock.release();
}
@Override
public void onConfigureFailed(
CameraCaptureSession cameraCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession failed");
mCameraOpenCloseLock.release();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCameraPreviewSession");
} catch (InterruptedException e) {
throw new RuntimeException(
"Interrupted while createCameraPreviewSession", e);
}
finally {
//mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
Log.i(LOGTAG, "startBackgroundThread");
stopBackgroundThread();
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
Log.i(LOGTAG, "stopBackgroundThread");
if(mBackgroundThread == null)
return;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(LOGTAG, "stopBackgroundThread");
}
}
@Override
protected void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize("+width+"x"+height+")");
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
try {
mCameraOpenCloseLock.acquire();
boolean needReconfig = cacPreviewSize(width, height);
mCameraWidth = mPreviewSize.getWidth();
mCameraHeight = mPreviewSize.getHeight();
if( !needReconfig ) {
mCameraOpenCloseLock.release();
return;
}
if (null != mCaptureSession) {
Log.d(LOGTAG, "closing existing previewSession");
mCaptureSession.close();
mCaptureSession = null;
}
mCameraOpenCloseLock.release();
createCameraPreviewSession();
} catch (InterruptedException e) {
mCameraOpenCloseLock.release();
throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
}
}
}
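cacPreviewSize() above chooses the largest camera output size that still fits inside the view and whose aspect ratio is within 0.2 of the view's. The same selection rule as a standalone sketch; the sizes array would come from StreamConfigurationMap.getOutputSizes(SurfaceTexture.class), as in the renderer, and the class name is illustrative:

    import android.util.Size;

    class PreviewSizeSketch {
        // Pick the largest supported size that fits inside the view and roughly matches its aspect ratio.
        static Size bestPreviewSize(Size[] sizes, int viewWidth, int viewHeight) {
            float targetAspect = (float) viewWidth / viewHeight;
            Size best = null;
            for (Size s : sizes) {
                boolean fits = s.getWidth() <= viewWidth && s.getHeight() <= viewHeight;
                boolean similarAspect =
                        Math.abs(targetAspect - (float) s.getWidth() / s.getHeight()) < 0.2f;
                boolean atLeastAsLarge = best == null
                        || (s.getWidth() >= best.getWidth() && s.getHeight() >= best.getHeight());
                if (fits && similarAspect && atLeastAsLarge) {
                    best = s;
                }
            }
            return best; // null when no supported size fits the view
        }
    }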

@@ -0,0 +1,494 @@
package org.opencv.android;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import org.opencv.BuildConfig;
import org.opencv.R;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import java.util.List;
/**
 * This is a basic class implementing the interaction between the camera and the OpenCV library.
 * Its main responsibility is to control when the camera can be enabled, process each frame,
 * call the external listener to make any adjustments to the frame, and then draw the resulting
 * frame on the screen.
 * Clients shall implement CvCameraViewListener.
*/
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private final Object mSyncObject = new Object();
protected int mFrameWidth;
protected int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* @param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
         * The returned value is the modified frame that needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
         * The returned value is the modified frame that needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
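    /* A minimal client sketch (illustrative only; MyActivity is a hypothetical name):
     *
     *   public class MyActivity extends Activity implements CvCameraViewListener2 {
     *       public void onCameraViewStarted(int width, int height) { }
     *       public void onCameraViewStopped() { }
     *       public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
     *           // return the Mat that should be drawn on the screen
     *           return inputFrame.rgba();
     *       }
     *   }
     */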
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
        public CvCameraViewListenerAdapter(CvCameraViewListener oldStyleListener) {
            mOldStyleListener = oldStyleListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
     * This interface is an abstract representation of a single frame from the camera for the onCameraFrame callback.
     * Attention: do not use objects that implement this interface outside of the onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* This method returns RGBA Mat with frame
*/
public Mat rgba();
/**
* This method returns single channel gray scale Mat with frame
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after both this method is called and surface is available
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable camera connection and stop
     * the delivery of frames even though the surface view itself is not destroyed and still stays on the screen
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
    /**
     * Sets the listener that will receive the camera view lifecycle and frame callbacks.
     * @param listener the CvCameraViewListener2 implementation to be notified
     */
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
     * This method sets the maximum size that the camera frame is allowed to be. When selecting
     * a size, the biggest size which is less than or equal to the set maximum will be selected.
     * For example, if setMaxFrameSize(200,200) is set and the supported sizes are 176x152 and 320x240,
     * the 176x152 preview frame will be selected.
     * This method is useful when the preview frame size needs to be restricted for some reason (for example, for video recording).
     * @param maxWidth - the maximum width allowed for the camera frame.
     * @param maxHeight - the maximum height allowed for the camera frame.
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have valid
* object and want it to be delivered to external client (via callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
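        // Draw the cached bitmap centered on the SurfaceView canvas; when mScale is non-zero
        // the bitmap is stretched by that factor, otherwise it is drawn at its native size.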
if (bmpValid && mCacheBitmap != null) {
Canvas canvas = getHolder().lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
if (BuildConfig.DEBUG)
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}
}
/**
     * This method, when invoked, shall perform the concrete operations needed to initialize the camera.
     * CONTRACT: as a result of this method the variables mFrameWidth and mFrameHeight MUST be
     * initialized with the size of the camera frames that will be delivered to the external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
     * Disconnects and releases the particular camera object connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTexture constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select camera preview size.
     * It goes over the list of supported preview sizes and selects the largest one that
     * fits both the values set via setMaxFrameSize() and the surface frame allocated for this view.
     * @param supportedSizes - the list of supported preview sizes (device-specific size objects)
     * @param accessor - adapter used to read the width and height from the size objects
     * @param surfaceWidth - the width of the surface available for the preview
     * @param surfaceHeight - the height of the surface available for the preview
* @return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}

@ -0,0 +1,438 @@
package org.opencv.android;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.View;
import org.opencv.android.CameraGLSurfaceView.CameraTextureListener;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
@TargetApi(15)
public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
protected final String LOGTAG = "CameraGLRendererBase";
// shaders
private final String vss = ""
+ "attribute vec2 vPosition;\n"
+ "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n"
+ "void main() {\n" + " texCoord = vTexCoord;\n"
+ " gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n"
+ "}";
private final String fssOES = ""
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
private final String fss2D = ""
+ "precision mediump float;\n"
+ "uniform sampler2D sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
// coord-s
private final float vertices[] = {
-1, -1,
-1, 1,
1, -1,
1, 1 };
private final float texCoordOES[] = {
0, 1,
0, 0,
1, 1,
1, 0 };
private final float texCoord2D[] = {
0, 0,
0, 1,
1, 0,
1, 1 };
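    // Note: the OES texture coordinates are vertically flipped relative to the 2D ones,
    // which compensates for the orientation of frames delivered through SurfaceTexture.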
private int[] texCamera = {0}, texFBO = {0}, texDraw = {0};
private int[] FBO = {0};
private int progOES = -1, prog2D = -1;
private int vPosOES, vTCOES, vPos2D, vTC2D;
private FloatBuffer vert, texOES, tex2D;
protected int mCameraWidth = -1, mCameraHeight = -1;
protected int mFBOWidth = -1, mFBOHeight = -1;
protected int mMaxCameraWidth = -1, mMaxCameraHeight = -1;
protected int mCameraIndex = CameraBridgeViewBase.CAMERA_ID_ANY;
protected SurfaceTexture mSTexture;
protected boolean mHaveSurface = false;
protected boolean mHaveFBO = false;
protected boolean mUpdateST = false;
protected boolean mEnabled = true;
protected boolean mIsStarted = false;
protected CameraGLSurfaceView mView;
protected abstract void openCamera(int id);
protected abstract void closeCamera();
protected abstract void setCameraPreviewSize(int width, int height); // updates mCameraWidth & mCameraHeight
public CameraGLRendererBase(CameraGLSurfaceView view) {
mView = view;
int bytes = vertices.length * Float.SIZE / Byte.SIZE;
vert = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
tex2D = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
vert.put(vertices).position(0);
texOES.put(texCoordOES).position(0);
tex2D.put(texCoord2D).position(0);
}
@Override
public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) {
//Log.i(LOGTAG, "onFrameAvailable");
mUpdateST = true;
mView.requestRender();
}
@Override
public void onDrawFrame(GL10 gl) {
//Log.i(LOGTAG, "onDrawFrame start");
if (!mHaveFBO)
return;
synchronized(this) {
if (mUpdateST) {
mSTexture.updateTexImage();
mUpdateST = false;
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
CameraTextureListener texListener = mView.getCameraTextureListener();
if(texListener != null) {
//Log.d(LOGTAG, "haveUserCallback");
// texCamera(OES) -> texFBO
drawTex(texCamera[0], true, FBO[0]);
// call user code (texFBO -> texDraw)
boolean modified = texListener.onCameraTexture(texFBO[0], texDraw[0], mCameraWidth, mCameraHeight);
if(modified) {
// texDraw -> screen
drawTex(texDraw[0], false, 0);
} else {
// texFBO -> screen
drawTex(texFBO[0], false, 0);
}
} else {
Log.d(LOGTAG, "texCamera(OES) -> screen");
// texCamera(OES) -> screen
drawTex(texCamera[0], true, 0);
}
//Log.i(LOGTAG, "onDrawFrame end");
}
}
@Override
public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) {
Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")");
mHaveSurface = true;
updateState();
setPreviewSize(surfaceWidth, surfaceHeight);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.i(LOGTAG, "onSurfaceCreated");
initShaders();
}
private void initShaders() {
String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION);
if (strGLVersion != null)
Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion);
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
progOES = loadShader(vss, fssOES);
vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition");
vTCOES = GLES20.glGetAttribLocation(progOES, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPosOES);
GLES20.glEnableVertexAttribArray(vTCOES);
prog2D = loadShader(vss, fss2D);
vPos2D = GLES20.glGetAttribLocation(prog2D, "vPosition");
vTC2D = GLES20.glGetAttribLocation(prog2D, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPos2D);
GLES20.glEnableVertexAttribArray(vTC2D);
}
private void initSurfaceTexture() {
Log.d(LOGTAG, "initSurfaceTexture");
deleteSurfaceTexture();
initTexOES(texCamera);
mSTexture = new SurfaceTexture(texCamera[0]);
mSTexture.setOnFrameAvailableListener(this);
}
private void deleteSurfaceTexture() {
Log.d(LOGTAG, "deleteSurfaceTexture");
if(mSTexture != null) {
mSTexture.release();
mSTexture = null;
deleteTex(texCamera);
}
}
private void initTexOES(int[] tex) {
if(tex.length == 1) {
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
}
private static void deleteTex(int[] tex) {
if(tex.length == 1) {
GLES20.glDeleteTextures(1, tex, 0);
}
}
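    // Compiles the vertex and fragment shaders, links and validates the program;
    // returns the program handle, or 0 if any stage fails.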
private static int loadShader(String vss, String fss) {
Log.d("CameraGLRendererBase", "loadShader");
int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vshader, vss);
GLES20.glCompileShader(vshader);
int[] status = new int[1];
GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not compile vertex shader: "+GLES20.glGetShaderInfoLog(vshader));
GLES20.glDeleteShader(vshader);
vshader = 0;
return 0;
}
int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fshader, fss);
GLES20.glCompileShader(fshader);
GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader));
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
fshader = 0;
return 0;
}
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vshader);
GLES20.glAttachShader(program, fshader);
GLES20.glLinkProgram(program);
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not link shader program: "+GLES20.glGetProgramInfoLog(program));
program = 0;
return 0;
}
GLES20.glValidateProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_VALIDATE_STATUS, status, 0);
if (status[0] == 0)
{
Log.e("CameraGLRendererBase", "Shader program validation error: "+GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
return 0;
}
Log.d("CameraGLRendererBase", "Shader program is built OK");
return program;
}
private void deleteFBO()
{
Log.d(LOGTAG, "deleteFBO("+mFBOWidth+"x"+mFBOHeight+")");
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glDeleteFramebuffers(1, FBO, 0);
deleteTex(texFBO);
deleteTex(texDraw);
mFBOWidth = mFBOHeight = 0;
}
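    // Allocates two RGBA textures sized to the camera frame (texFBO is attached as the FBO's
    // color buffer, texDraw is handed to the user callback for its output) and binds the FBO.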
private void initFBO(int width, int height)
{
Log.d(LOGTAG, "initFBO("+width+"x"+height+")");
deleteFBO();
GLES20.glGenTextures(1, texDraw, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glGenTextures(1, texFBO, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
//int hFBO;
GLES20.glGenFramebuffers(1, FBO, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0);
Log.d(LOGTAG, "initFBO error status: " + GLES20.glGetError());
int FBOstatus = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (FBOstatus != GLES20.GL_FRAMEBUFFER_COMPLETE)
Log.e(LOGTAG, "initFBO failed, status: " + FBOstatus);
mFBOWidth = width;
mFBOHeight = height;
}
// draw texture to FBO or to screen if fbo == 0
private void drawTex(int tex, boolean isOES, int fbo)
{
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
if(fbo == 0)
GLES20.glViewport(0, 0, mView.getWidth(), mView.getHeight());
else
GLES20.glViewport(0, 0, mFBOWidth, mFBOHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if(isOES) {
GLES20.glUseProgram(progOES);
GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES);
} else {
GLES20.glUseProgram(prog2D);
GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D);
}
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
if(isOES) {
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0);
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glFlush();
}
public synchronized void enableView() {
Log.d(LOGTAG, "enableView");
mEnabled = true;
updateState();
}
public synchronized void disableView() {
Log.d(LOGTAG, "disableView");
mEnabled = false;
updateState();
}
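    // Starts or stops the camera pipeline whenever the enabled flag, surface availability
    // or view visibility changes.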
protected void updateState() {
Log.d(LOGTAG, "updateState");
Log.d(LOGTAG, "mEnabled="+mEnabled+", mHaveSurface="+mHaveSurface);
boolean willStart = mEnabled && mHaveSurface && mView.getVisibility() == View.VISIBLE;
if (willStart != mIsStarted) {
if(willStart) doStart();
else doStop();
} else {
Log.d(LOGTAG, "keeping State unchanged");
}
Log.d(LOGTAG, "updateState end");
}
protected synchronized void doStart() {
Log.d(LOGTAG, "doStart");
initSurfaceTexture();
openCamera(mCameraIndex);
mIsStarted = true;
if(mCameraWidth>0 && mCameraHeight>0)
setPreviewSize(mCameraWidth, mCameraHeight); // start preview and call listener.onCameraViewStarted()
}
protected void doStop() {
Log.d(LOGTAG, "doStop");
synchronized(this) {
mUpdateST = false;
mIsStarted = false;
mHaveFBO = false;
closeCamera();
deleteSurfaceTexture();
}
CameraTextureListener listener = mView.getCameraTextureListener();
if(listener != null) listener.onCameraViewStopped();
}
protected void setPreviewSize(int width, int height) {
synchronized(this) {
mHaveFBO = false;
mCameraWidth = width;
mCameraHeight = height;
setCameraPreviewSize(width, height); // can change mCameraWidth & mCameraHeight
initFBO(mCameraWidth, mCameraHeight);
mHaveFBO = true;
}
CameraTextureListener listener = mView.getCameraTextureListener();
if(listener != null) listener.onCameraViewStarted(mCameraWidth, mCameraHeight);
}
public void setCameraIndex(int cameraIndex) {
disableView();
mCameraIndex = cameraIndex;
enableView();
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
disableView();
mMaxCameraWidth = maxWidth;
mMaxCameraHeight = maxHeight;
enableView();
}
public void onResume() {
Log.i(LOGTAG, "onResume");
}
public void onPause() {
Log.i(LOGTAG, "onPause");
mHaveSurface = false;
updateState();
mCameraWidth = mCameraHeight = -1;
}
}

@ -0,0 +1,118 @@
package org.opencv.android;
import android.content.Context;
import android.content.res.TypedArray;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import org.opencv.R;
public class CameraGLSurfaceView extends GLSurfaceView {
private static final String LOGTAG = "CameraGLSurfaceView";
public interface CameraTextureListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
         * the frames will start to be delivered to the client via the onCameraTexture() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
         * No frames will be delivered via the onCameraTexture() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when a new preview frame from Camera is ready.
* @param texIn - the OpenGL texture ID that contains frame in RGBA format
         * @param texOut - the OpenGL texture ID that can be used to store the modified frame image to display
* @param width - the width of the frame
* @param height - the height of the frame
* @return `true` if `texOut` should be displayed, `false` - to show `texIn`
*/
public boolean onCameraTexture(int texIn, int texOut, int width, int height);
};
private CameraTextureListener mTexListener;
private CameraGLRendererBase mRenderer;
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
int cameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
styledAttrs.recycle();
if(android.os.Build.VERSION.SDK_INT >= 21)
mRenderer = new Camera2Renderer(this);
else
mRenderer = new CameraRenderer(this);
setCameraIndex(cameraIndex);
setEGLContextClientVersion(2);
setRenderer(mRenderer);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
public void setCameraTextureListener(CameraTextureListener texListener)
{
mTexListener = texListener;
}
public CameraTextureListener getCameraTextureListener()
{
return mTexListener;
}
public void setCameraIndex(int cameraIndex) {
mRenderer.setCameraIndex(cameraIndex);
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
mRenderer.setMaxCameraPreviewSize(maxWidth, maxHeight);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mRenderer.mHaveSurface = false;
super.surfaceDestroyed(holder);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
super.surfaceChanged(holder, format, w, h);
}
@Override
public void onResume() {
Log.i(LOGTAG, "onResume");
super.onResume();
mRenderer.onResume();
}
@Override
public void onPause() {
Log.i(LOGTAG, "onPause");
mRenderer.onPause();
super.onPause();
}
public void enableView() {
mRenderer.enableView();
}
public void disableView() {
mRenderer.disableView();
}
}

@ -0,0 +1,166 @@
package org.opencv.android;
import android.annotation.TargetApi;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Build;
import android.util.Log;
import java.io.IOException;
import java.util.List;
@TargetApi(15)
@SuppressWarnings("deprecation")
public class CameraRenderer extends CameraGLRendererBase {
public static final String LOGTAG = "CameraRenderer";
private Camera mCamera;
private boolean mPreviewStarted = false;
CameraRenderer(CameraGLSurfaceView view) {
super(view);
}
@Override
protected synchronized void closeCamera() {
Log.i(LOGTAG, "closeCamera");
if(mCamera != null) {
mCamera.stopPreview();
mPreviewStarted = false;
mCamera.release();
mCamera = null;
}
}
@Override
protected synchronized void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
closeCamera();
if (id == CameraBridgeViewBase.CAMERA_ID_ANY) {
Log.d(LOGTAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(LOGTAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(LOGTAG, "Trying to open camera with new open(" + camIdx + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.i(LOGTAG, "Trying to open BACK camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.i(LOGTAG, "Trying to open FRONT camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.e(LOGTAG, "Back camera not found!");
} else if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.e(LOGTAG, "Front camera not found!");
} else {
Log.d(LOGTAG, "Trying to open camera with new open(" + localCameraIndex + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if(mCamera == null) {
Log.e(LOGTAG, "Error: can't open camera");
return;
}
Camera.Parameters params = mCamera.getParameters();
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
try {
mCamera.setPreviewTexture(mSTexture);
} catch (IOException ioe) {
Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage());
}
}
@Override
public synchronized void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height);
if(mCamera == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return;
}
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
Camera.Parameters param = mCamera.getParameters();
List<Size> psize = param.getSupportedPreviewSizes();
int bestWidth = 0, bestHeight = 0;
if (psize.size() > 0) {
float aspect = (float)width / height;
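            // Pick the largest supported preview size that fits within the requested bounds
            // and whose aspect ratio differs from the requested one by less than 0.2.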
for (Size size : psize) {
int w = size.width, h = size.height;
Log.d(LOGTAG, "checking camera preview size: "+w+"x"+h);
if ( w <= width && h <= height &&
w >= bestWidth && h >= bestHeight &&
Math.abs(aspect - (float)w/h) < 0.2 ) {
bestWidth = w;
bestHeight = h;
}
}
if(bestWidth <= 0 || bestHeight <= 0) {
bestWidth = psize.get(0).width;
bestHeight = psize.get(0).height;
Log.e(LOGTAG, "Error: best size was not selected, using "+bestWidth+" x "+bestHeight);
} else {
Log.i(LOGTAG, "Selected best size: "+bestWidth+" x "+bestHeight);
}
if(mPreviewStarted) {
mCamera.stopPreview();
mPreviewStarted = false;
}
mCameraWidth = bestWidth;
mCameraHeight = bestHeight;
param.setPreviewSize(bestWidth, bestHeight);
}
param.set("orientation", "landscape");
mCamera.setParameters(param);
mCamera.startPreview();
mPreviewStarted = true;
}
}

@ -0,0 +1,65 @@
package org.opencv.android;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.Log;
import org.opencv.core.Core;
import java.text.DecimalFormat;
public class FpsMeter {
private static final String TAG = "FpsMeter";
private static final int STEP = 20;
private static final DecimalFormat FPS_FORMAT = new DecimalFormat("0.00");
    private int mFramesCounter;
private double mFrequency;
private long mprevFrameTime;
private String mStrfps;
Paint mPaint;
boolean mIsInitialized = false;
int mWidth = 0;
int mHeight = 0;
public void init() {
        mFramesCounter = 0;
mFrequency = Core.getTickFrequency();
mprevFrameTime = Core.getTickCount();
mStrfps = "";
mPaint = new Paint();
mPaint.setColor(Color.BLUE);
mPaint.setTextSize(20);
}
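    // Recomputes the FPS estimate every STEP frames using OpenCV's tick counter.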
public void measure() {
if (!mIsInitialized) {
init();
mIsInitialized = true;
} else {
            mFramesCounter++;
            if (mFramesCounter % STEP == 0) {
long time = Core.getTickCount();
double fps = STEP * mFrequency / (time - mprevFrameTime);
mprevFrameTime = time;
if (mWidth != 0 && mHeight != 0)
mStrfps = FPS_FORMAT.format(fps) + " FPS@" + Integer.valueOf(mWidth) + "x" + Integer.valueOf(mHeight);
else
mStrfps = FPS_FORMAT.format(fps) + " FPS";
Log.i(TAG, mStrfps);
}
}
}
public void setResolution(int width, int height) {
mWidth = width;
mHeight = height;
}
public void draw(Canvas canvas, float offsetx, float offsety) {
Log.d(TAG, mStrfps);
canvas.drawText(mStrfps, offsetx, offsety, mPaint);
}
}

@ -0,0 +1,34 @@
package org.opencv.android;
/**
* Installation callback interface.
*/
public interface InstallCallbackInterface
{
/**
* New package installation is required.
*/
static final int NEW_INSTALLATION = 0;
/**
* Current package installation is in progress.
*/
static final int INSTALLATION_PROGRESS = 1;
/**
* Target package name.
* @return Return target package name.
*/
public String getPackageName();
/**
* Installation is approved.
*/
public void install();
/**
* Installation is canceled.
*/
public void cancel();
/**
* Wait for package installation.
*/
public void wait_install();
};

@ -0,0 +1,368 @@
package org.opencv.android;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.*;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.ViewGroup.LayoutParams;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import java.nio.ByteBuffer;
import java.util.Arrays;
/**
* This class is an implementation of the Bridge View between OpenCV and Java Camera.
 * This class relies on the functionality available in the base class and only implements
 * the required functions:
 * connectCamera - opens the camera via the camera2 API and sets up an ImageReader that receives the preview frames.
 * disconnectCamera - closes the camera and the capture session and stops the background thread.
 * When a frame is delivered via the ImageReader callback, it is processed via OpenCV to be
 * converted to RGBA32 and then passed to the external callback for modifications if required.
*/
@TargetApi(21)
public class JavaCamera2View extends CameraBridgeViewBase {
private static final String LOGTAG = "JavaCamera2View";
private ImageReader mImageReader;
private int mPreviewFormat = ImageFormat.YUV_420_888;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private String mCameraID;
private android.util.Size mPreviewSize = new android.util.Size(-1, -1);
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
public JavaCamera2View(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCamera2View(Context context, AttributeSet attrs) {
super(context, attrs);
}
private void startBackgroundThread() {
Log.i(LOGTAG, "startBackgroundThread");
stopBackgroundThread();
mBackgroundThread = new HandlerThread("OpenCVCameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
Log.i(LOGTAG, "stopBackgroundThread");
if (mBackgroundThread == null)
return;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(LOGTAG, "stopBackgroundThread", e);
}
}
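    // Chooses a camera ID from the CameraManager: the first available camera for CAMERA_ID_ANY,
    // otherwise the first camera whose lens facing matches the requested front/back index.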
protected boolean initializeCamera() {
Log.i(LOGTAG, "initializeCamera");
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String camList[] = manager.getCameraIdList();
if (camList.length == 0) {
Log.e(LOGTAG, "Error: camera isn't detected.");
return false;
}
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_ANY) {
mCameraID = camList[0];
} else {
for (String cameraID : camList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
if ((mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) ||
(mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT)
) {
mCameraID = cameraID;
break;
}
}
}
if (mCameraID != null) {
Log.i(LOGTAG, "Opening camera: " + mCameraID);
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
}
return true;
} catch (CameraAccessException e) {
Log.e(LOGTAG, "OpenCamera - Camera Access Exception", e);
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception", e);
} catch (SecurityException e) {
Log.e(LOGTAG, "OpenCamera - Security Exception", e);
}
return false;
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
}
};
private void createCameraPreviewSession() {
final int w = mPreviewSize.getWidth(), h = mPreviewSize.getHeight();
Log.i(LOGTAG, "createCameraPreviewSession(" + w + "x" + h + ")");
if (w < 0 || h < 0)
return;
try {
if (null == mCameraDevice) {
Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
return;
}
if (null != mCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
return;
}
mImageReader = ImageReader.newInstance(w, h, mPreviewFormat, 2);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null)
return;
// sanity checks - 3 planes
Image.Plane[] planes = image.getPlanes();
assert (planes.length == 3);
assert (image.getFormat() == mPreviewFormat);
// see also https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888
// Y plane (0) non-interleaved => stride == 1; U/V plane interleaved => stride == 2
assert (planes[0].getPixelStride() == 1);
assert (planes[1].getPixelStride() == 2);
assert (planes[2].getPixelStride() == 2);
ByteBuffer y_plane = planes[0].getBuffer();
ByteBuffer uv_plane = planes[1].getBuffer();
Mat y_mat = new Mat(h, w, CvType.CV_8UC1, y_plane);
Mat uv_mat = new Mat(h / 2, w / 2, CvType.CV_8UC2, uv_plane);
JavaCamera2Frame tempFrame = new JavaCamera2Frame(y_mat, uv_mat, w, h);
deliverAndDrawFrame(tempFrame);
tempFrame.release();
image.close();
}
}, mBackgroundHandler);
Surface surface = mImageReader.getSurface();
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
Log.i(LOGTAG, "createCaptureSession::onConfigured");
if (null == mCameraDevice) {
return; // camera is already closed
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
Log.i(LOGTAG, "CameraPreviewSession has been started");
} catch (Exception e) {
Log.e(LOGTAG, "createCaptureSession failed", e);
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession failed");
}
},
null
);
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCameraPreviewSession", e);
}
}
@Override
protected void disconnectCamera() {
Log.i(LOGTAG, "closeCamera");
try {
CameraDevice c = mCameraDevice;
mCameraDevice = null;
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != c) {
c.close();
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} finally {
stopBackgroundThread();
}
}
boolean calcPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "calcPreviewSize: " + width + "x" + height);
if (mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraID);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int bestWidth = 0, bestHeight = 0;
float aspect = (float) width / height;
android.util.Size[] sizes = map.getOutputSizes(ImageReader.class);
bestWidth = sizes[0].getWidth();
bestHeight = sizes[0].getHeight();
for (android.util.Size sz : sizes) {
int w = sz.getWidth(), h = sz.getHeight();
Log.d(LOGTAG, "trying size: " + w + "x" + h);
if (width >= w && height >= h && bestWidth <= w && bestHeight <= h
&& Math.abs(aspect - (float) w / h) < 0.2) {
bestWidth = w;
bestHeight = h;
}
}
Log.i(LOGTAG, "best size: " + bestWidth + "x" + bestHeight);
assert(!(bestWidth == 0 || bestHeight == 0));
if (mPreviewSize.getWidth() == bestWidth && mPreviewSize.getHeight() == bestHeight)
return false;
else {
mPreviewSize = new android.util.Size(bestWidth, bestHeight);
return true;
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "calcPreviewSize - Camera Access Exception", e);
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "calcPreviewSize - Illegal Argument Exception", e);
} catch (SecurityException e) {
Log.e(LOGTAG, "calcPreviewSize - Security Exception", e);
}
return false;
}
@Override
protected boolean connectCamera(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize(" + width + "x" + height + ")");
startBackgroundThread();
initializeCamera();
try {
boolean needReconfig = calcPreviewSize(width, height);
mFrameWidth = mPreviewSize.getWidth();
mFrameHeight = mPreviewSize.getHeight();
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
AllocateCache();
if (needReconfig) {
if (null != mCaptureSession) {
Log.d(LOGTAG, "closing existing previewSession");
mCaptureSession.close();
mCaptureSession = null;
}
createCameraPreviewSession();
}
} catch (RuntimeException e) {
throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
}
return true;
}
private class JavaCamera2Frame implements CvCameraViewFrame {
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (mPreviewFormat == ImageFormat.NV21)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
else if (mPreviewFormat == ImageFormat.YV12)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
else if (mPreviewFormat == ImageFormat.YUV_420_888) {
assert (mUVFrameData != null);
Imgproc.cvtColorTwoPlane(mYuvFrameData, mUVFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21);
} else
throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
return mRgba;
}
public JavaCamera2Frame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mUVFrameData = null;
mRgba = new Mat();
}
public JavaCamera2Frame(Mat Y, Mat UV, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Y;
mUVFrameData = UV;
mRgba = new Mat();
}
public void release() {
mRgba.release();
}
private Mat mYuvFrameData;
private Mat mUVFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
};
}

@ -0,0 +1,378 @@
package org.opencv.android;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.ViewGroup.LayoutParams;
import org.opencv.BuildConfig;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import java.util.List;
/**
* This class is an implementation of the Bridge View between OpenCV and Java Camera.
 * This class relies on the functionality available in the base class and only implements
 * the required functions:
 * connectCamera - opens the Java camera and sets the PreviewCallback to be delivered.
 * disconnectCamera - closes the camera and stops the preview.
 * When a frame is delivered via the callback from the camera, it is processed via OpenCV to be
 * converted to RGBA32 and then passed to the external callback for modifications if required.
*/
public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";
private byte mBuffer[];
private Mat[] mFrameChain;
private int mChainIdx = 0;
private Thread mThread;
private boolean mStopThread;
protected Camera mCamera;
protected JavaCameraFrame[] mCameraFrame;
private SurfaceTexture mSurfaceTexture;
private int mPreviewFormat = ImageFormat.NV21;
public static class JavaCameraSizeAccessor implements ListItemAccessor {
@Override
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
@Override
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
public JavaCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
protected boolean initializeCamera(int width, int height) {
Log.d(TAG, "Initialize java camera");
boolean result = true;
synchronized (this) {
mCamera = null;
if (mCameraIndex == CAMERA_ID_ANY) {
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CAMERA_ID_BACK) {
Log.i(TAG, "Trying to open back camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CAMERA_ID_FRONT) {
Log.i(TAG, "Trying to open front camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CAMERA_ID_BACK) {
Log.e(TAG, "Back camera not found!");
} else if (localCameraIndex == CAMERA_ID_FRONT) {
Log.e(TAG, "Front camera not found!");
} else {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if (mCamera == null)
return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
Log.d(TAG, "getSupportedPreviewSizes()");
List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
if (sizes != null) {
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
/* Image format NV21 causes issues in the Android emulators */
if (Build.FINGERPRINT.startsWith("generic")
|| Build.FINGERPRINT.startsWith("unknown")
|| Build.MODEL.contains("google_sdk")
|| Build.MODEL.contains("Emulator")
|| Build.MODEL.contains("Android SDK built for x86")
|| Build.MANUFACTURER.contains("Genymotion")
|| (Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
|| "google_sdk".equals(Build.PRODUCT))
params.setPreviewFormat(ImageFormat.YV12); // "generic" or "android" = android emulator
else
params.setPreviewFormat(ImageFormat.NV21);
mPreviewFormat = params.getPreviewFormat();
Log.d(TAG, "Set preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
params.setPreviewSize((int)frameSize.width, (int)frameSize.height);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100"))
params.setRecordingHint(true);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
if (mFpsMeter != null) {
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
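                    // Two YUV buffers (height * 3/2 rows to hold the 4:2:0 chroma planes) are
                    // swapped between the camera callback and the processing thread.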
mFrameChain = new Mat[2];
mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
AllocateCache();
mCameraFrame = new JavaCameraFrame[2];
mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCamera.setPreviewTexture(mSurfaceTexture);
} else
mCamera.setPreviewDisplay(null);
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
}
else
result = false;
} catch (Exception e) {
result = false;
e.printStackTrace();
}
}
return result;
}
protected void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
}
mCamera = null;
if (mFrameChain != null) {
mFrameChain[0].release();
mFrameChain[1].release();
}
if (mCameraFrame != null) {
mCameraFrame[0].release();
mCameraFrame[1].release();
}
}
}
private boolean mCameraFrameReady = false;
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate camera
* 2. We need to start thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(width, height))
return false;
mCameraFrameReady = false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
@Override
protected void disconnectCamera() {
/* 1. We need to stop thread which updating the frames
* 2. Stop camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Waiting for thread");
if (mThread != null)
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
mCameraFrameReady = false;
}
@Override
public void onPreviewFrame(byte[] frame, Camera arg1) {
if (BuildConfig.DEBUG)
Log.d(TAG, "Preview Frame received. Frame size: " + frame.length);
synchronized (this) {
mFrameChain[mChainIdx].put(0, 0, frame);
mCameraFrameReady = true;
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class JavaCameraFrame implements CvCameraViewFrame {
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (mPreviewFormat == ImageFormat.NV21)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
else if (mPreviewFormat == ImageFormat.YV12)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
else
throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
return mRgba;
}
public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
}
public void release() {
mRgba.release();
}
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
};
private class CameraWorker implements Runnable {
@Override
public void run() {
do {
boolean hasFrame = false;
synchronized (JavaCameraView.this) {
try {
while (!mCameraFrameReady && !mStopThread) {
JavaCameraView.this.wait();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
if (mCameraFrameReady)
{
mChainIdx = 1 - mChainIdx;
mCameraFrameReady = false;
hasFrame = true;
}
}
if (!mStopThread && hasFrame) {
if (!mFrameChain[1 - mChainIdx].empty())
deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}
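For reference, the frames produced by the worker thread above reach client code through the CvCameraViewListener2 callback of CameraBridgeViewBase (part of the same OpenCV Android SDK, not shown in this hunk). A minimal consumption sketch with illustrative class and field names:

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.core.Mat;
public class PreviewActivity extends android.app.Activity
        implements CameraBridgeViewBase.CvCameraViewListener2 {
    private CameraBridgeViewBase mCameraView;   // e.g. a JavaCameraView declared in the layout
    @Override public void onCameraViewStarted(int width, int height) { /* allocate per-frame Mats here */ }
    @Override public void onCameraViewStopped() { /* release them here */ }
    @Override public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        // rgba() converts the NV21/YV12 preview buffer to CV_8UC4; gray() returns only the Y plane.
        return inputFrame.rgba();
    }
}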

@ -0,0 +1,40 @@
package org.opencv.android;
/**
* Interface for callback object in case of asynchronous initialization of OpenCV.
*/
public interface LoaderCallbackInterface
{
/**
* OpenCV initialization finished successfully.
*/
static final int SUCCESS = 0;
/**
* Google Play Market cannot be invoked.
*/
static final int MARKET_ERROR = 2;
/**
* OpenCV library installation has been canceled by the user.
*/
static final int INSTALL_CANCELED = 3;
/**
* This version of OpenCV Manager Service is incompatible with the app. Possibly, a service update is required.
*/
static final int INCOMPATIBLE_MANAGER_VERSION = 4;
/**
* OpenCV library initialization has failed.
*/
static final int INIT_FAILED = 0xff;
/**
* Callback method, called after OpenCV library initialization.
* @param status status of initialization (see initialization status constants).
*/
public void onManagerConnected(int status);
/**
* Callback method, called in case the package installation is needed.
* @param callback answer object with approve and cancel methods and the package description.
*/
public void onPackageInstall(final int operation, InstallCallbackInterface callback);
};
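A minimal client-side implementation sketch (the field name is illustrative; install()/cancel() are assumed to be the methods exposed by InstallCallbackInterface, which is declared alongside this interface in the SDK):

private final LoaderCallbackInterface mLoaderCallback = new LoaderCallbackInterface() {
    @Override
    public void onManagerConnected(int status) {
        if (status == LoaderCallbackInterface.SUCCESS) {
            // Native libraries are loaded; OpenCV classes may be used from here on.
        } else {
            // MARKET_ERROR, INSTALL_CANCELED, INCOMPATIBLE_MANAGER_VERSION or INIT_FAILED.
        }
    }
    @Override
    public void onPackageInstall(int operation, InstallCallbackInterface callback) {
        callback.install();   // assumption: or callback.cancel() to abort the OpenCV Manager install
    }
};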

@ -0,0 +1,132 @@
package org.opencv.android;
import android.content.Context;
/**
* Helper class provides common initialization methods for OpenCV library.
*/
public class OpenCVLoader
{
/**
* OpenCV Library version 2.4.2.
*/
public static final String OPENCV_VERSION_2_4_2 = "2.4.2";
/**
* OpenCV Library version 2.4.3.
*/
public static final String OPENCV_VERSION_2_4_3 = "2.4.3";
/**
* OpenCV Library version 2.4.4.
*/
public static final String OPENCV_VERSION_2_4_4 = "2.4.4";
/**
* OpenCV Library version 2.4.5.
*/
public static final String OPENCV_VERSION_2_4_5 = "2.4.5";
/**
* OpenCV Library version 2.4.6.
*/
public static final String OPENCV_VERSION_2_4_6 = "2.4.6";
/**
* OpenCV Library version 2.4.7.
*/
public static final String OPENCV_VERSION_2_4_7 = "2.4.7";
/**
* OpenCV Library version 2.4.8.
*/
public static final String OPENCV_VERSION_2_4_8 = "2.4.8";
/**
* OpenCV Library version 2.4.9.
*/
public static final String OPENCV_VERSION_2_4_9 = "2.4.9";
/**
* OpenCV Library version 2.4.10.
*/
public static final String OPENCV_VERSION_2_4_10 = "2.4.10";
/**
* OpenCV Library version 2.4.11.
*/
public static final String OPENCV_VERSION_2_4_11 = "2.4.11";
/**
* OpenCV Library version 2.4.12.
*/
public static final String OPENCV_VERSION_2_4_12 = "2.4.12";
/**
* OpenCV Library version 2.4.13.
*/
public static final String OPENCV_VERSION_2_4_13 = "2.4.13";
/**
* OpenCV Library version 3.0.0.
*/
public static final String OPENCV_VERSION_3_0_0 = "3.0.0";
/**
* OpenCV Library version 3.1.0.
*/
public static final String OPENCV_VERSION_3_1_0 = "3.1.0";
/**
* OpenCV Library version 3.2.0.
*/
public static final String OPENCV_VERSION_3_2_0 = "3.2.0";
/**
* OpenCV Library version 3.3.0.
*/
public static final String OPENCV_VERSION_3_3_0 = "3.3.0";
/**
* OpenCV Library version 3.4.0.
*/
public static final String OPENCV_VERSION_3_4_0 = "3.4.0";
/**
* Current OpenCV Library version.
*/
public static final String OPENCV_VERSION = "3.4.1";
/**
* Loads and initializes OpenCV library from the current application package. Roughly, it's an analog of System.loadLibrary("opencv_java").
* @return Returns true if initialization of OpenCV was successful.
*/
public static boolean initDebug()
{
return StaticHelper.initOpenCV(false);
}
/**
* Loads and initializes OpenCV library from the current application package. Roughly, it's an analog of System.loadLibrary("opencv_java").
* @param InitCuda load and initialize CUDA runtime libraries.
* @return Returns true if initialization of OpenCV was successful.
*/
public static boolean initDebug(boolean InitCuda)
{
return StaticHelper.initOpenCV(InitCuda);
}
/**
* Loads and initializes OpenCV library using OpenCV Engine service.
* @param Version OpenCV library version.
* @param AppContext application context for connecting to the service.
* @param Callback object that implements LoaderCallbackInterface for handling the connection status.
* @return Returns true if initialization of OpenCV is successful.
*/
public static boolean initAsync(String Version, Context AppContext,
LoaderCallbackInterface Callback)
{
return AsyncServiceHelper.initOpenCV(Version, AppContext, Callback);
}
}
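Typical loading sequence, sketched under the assumption of an Activity that holds the mLoaderCallback shown earlier (names are placeholders, not part of this file):

@Override
protected void onResume() {
    super.onResume();
    if (OpenCVLoader.initDebug()) {
        // Libraries bundled with the APK were found and loaded statically; report success directly.
        mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    } else {
        // Otherwise defer to the OpenCV Manager service for asynchronous initialization.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0, this, mLoaderCallback);
    }
}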

@ -0,0 +1,104 @@
package org.opencv.android;
import android.util.Log;
import org.opencv.core.Core;
import java.util.StringTokenizer;
class StaticHelper {
public static boolean initOpenCV(boolean InitCuda)
{
boolean result;
String libs = "";
if(InitCuda)
{
loadLibrary("cudart");
loadLibrary("nppc");
loadLibrary("nppi");
loadLibrary("npps");
loadLibrary("cufft");
loadLibrary("cublas");
}
Log.d(TAG, "Trying to get library list");
try
{
System.loadLibrary("opencv_info");
libs = getLibraryList();
}
catch(UnsatisfiedLinkError e)
{
Log.e(TAG, "OpenCV error: Cannot load info library for OpenCV");
}
Log.d(TAG, "Library list: \"" + libs + "\"");
Log.d(TAG, "First attempt to load libs");
if (initOpenCVLibs(libs))
{
Log.d(TAG, "First attempt to load libs is OK");
String eol = System.getProperty("line.separator");
for (String str : Core.getBuildInformation().split(eol))
Log.i(TAG, str);
result = true;
}
else
{
Log.d(TAG, "First attempt to load libs fails");
result = false;
}
return result;
}
private static boolean loadLibrary(String Name)
{
boolean result = true;
Log.d(TAG, "Trying to load library " + Name);
try
{
System.loadLibrary(Name);
Log.d(TAG, "Library " + Name + " loaded");
}
catch(UnsatisfiedLinkError e)
{
Log.d(TAG, "Cannot load library \"" + Name + "\"");
e.printStackTrace();
result = false;
}
return result;
}
private static boolean initOpenCVLibs(String Libs)
{
Log.d(TAG, "Trying to init OpenCV libs");
boolean result = true;
if ((null != Libs) && (Libs.length() != 0))
{
Log.d(TAG, "Trying to load libs by dependency list");
StringTokenizer splitter = new StringTokenizer(Libs, ";");
while(splitter.hasMoreTokens())
{
result &= loadLibrary(splitter.nextToken());
}
}
else
{
// If dependencies list is not defined or empty.
result = loadLibrary("opencv_java3");
}
return result;
}
private static final String TAG = "OpenCV/StaticHelper";
private static native String getLibraryList();
}

@ -0,0 +1,134 @@
package org.opencv.android;
import android.content.Context;
import android.graphics.Bitmap;
import org.opencv.core.CvException;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import java.io.*;
public class Utils {
public static String exportResource(Context context, int resourceId) {
return exportResource(context, resourceId, "OpenCV_data");
}
public static String exportResource(Context context, int resourceId, String dirname) {
String fullname = context.getResources().getString(resourceId);
String resName = fullname.substring(fullname.lastIndexOf("/") + 1);
try {
InputStream is = context.getResources().openRawResource(resourceId);
File resDir = context.getDir(dirname, Context.MODE_PRIVATE);
File resFile = new File(resDir, resName);
FileOutputStream os = new FileOutputStream(resFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
return resFile.getAbsolutePath();
} catch (IOException e) {
e.printStackTrace();
throw new CvException("Failed to export resource " + resName
+ ". Exception thrown: " + e);
}
}
public static Mat loadResource(Context context, int resourceId) throws IOException
{
return loadResource(context, resourceId, -1);
}
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
InputStream is = context.getResources().openRawResource(resourceId);
ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
encoded.put(0, 0, os.toByteArray());
os.close();
Mat decoded = Imgcodecs.imdecode(encoded, flags);
encoded.release();
return decoded;
}
/**
* Converts Android Bitmap to OpenCV Mat.
* <p>
* This function converts an Android Bitmap image to the OpenCV Mat.
* <br>'ARGB_8888' and 'RGB_565' input Bitmap formats are supported.
* <br>The output Mat is always created with the same size as the input Bitmap and of the 'CV_8UC4' type;
* it keeps the image in RGBA format.
* <br>This function throws an exception if the conversion fails.
* @param bmp is a valid input Bitmap object of the type 'ARGB_8888' or 'RGB_565'.
* @param mat is a valid output Mat object, it will be reallocated if needed, so it may be empty.
* @param unPremultiplyAlpha is a flag that determines whether the bitmap needs to be converted from the alpha-premultiplied format (the way Android stores 'ARGB_8888' bitmaps) to the regular one; this flag is ignored for 'RGB_565' bitmaps.
*/
public static void bitmapToMat(Bitmap bmp, Mat mat, boolean unPremultiplyAlpha) {
if (bmp == null)
throw new java.lang.IllegalArgumentException("bmp == null");
if (mat == null)
throw new java.lang.IllegalArgumentException("mat == null");
nBitmapToMat2(bmp, mat.nativeObj, unPremultiplyAlpha);
}
/**
* Short form of the bitmapToMat(bmp, mat, unPremultiplyAlpha=false).
* @param bmp is a valid input Bitmap object of the type 'ARGB_8888' or 'RGB_565'.
* @param mat is a valid output Mat object, it will be reallocated if needed, so Mat may be empty.
*/
public static void bitmapToMat(Bitmap bmp, Mat mat) {
bitmapToMat(bmp, mat, false);
}
/**
* Converts OpenCV Mat to Android Bitmap.
* <p>
* <br>This function converts an image in the OpenCV Mat representation to the Android Bitmap.
* <br>The input Mat object has to be of the types 'CV_8UC1' (gray-scale), 'CV_8UC3' (RGB) or 'CV_8UC4' (RGBA).
* <br>The output Bitmap object has to be of the same size as the input Mat and of the types 'ARGB_8888' or 'RGB_565'.
* <br>This function throws an exception if the conversion fails.
*
* @param mat is a valid input Mat object of types 'CV_8UC1', 'CV_8UC3' or 'CV_8UC4'.
* @param bmp is a valid Bitmap object of the same size as the Mat and of type 'ARGB_8888' or 'RGB_565'.
* @param premultiplyAlpha is a flag that determines whether the Mat needs to be converted to the alpha-premultiplied format (the way Android stores 'ARGB_8888' bitmaps); the flag is ignored for 'RGB_565' bitmaps.
*/
public static void matToBitmap(Mat mat, Bitmap bmp, boolean premultiplyAlpha) {
if (mat == null)
throw new java.lang.IllegalArgumentException("mat == null");
if (bmp == null)
throw new java.lang.IllegalArgumentException("bmp == null");
nMatToBitmap2(mat.nativeObj, bmp, premultiplyAlpha);
}
/**
* Short form of the <b>matToBitmap(mat, bmp, premultiplyAlpha=false)</b>
* @param mat is a valid input Mat object of the types 'CV_8UC1', 'CV_8UC3' or 'CV_8UC4'.
* @param bmp is a valid Bitmap object of the same size as the Mat and of type 'ARGB_8888' or 'RGB_565'.
*/
public static void matToBitmap(Mat mat, Bitmap bmp) {
matToBitmap(mat, bmp, false);
}
private static native void nBitmapToMat2(Bitmap b, long m_addr, boolean unPremultiplyAlpha);
private static native void nMatToBitmap2(long m_addr, Bitmap b, boolean premultiplyAlpha);
}
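A round-trip sketch for the two conversion helpers (the method and variable names are illustrative; org.opencv.imgproc.Imgproc from the same SDK is assumed to be on the classpath):

static Bitmap toGrayscale(Bitmap src) {
    Mat rgba = new Mat();
    Utils.bitmapToMat(src, rgba);                           // CV_8UC4 in RGBA order
    Mat gray = new Mat();
    Imgproc.cvtColor(rgba, gray, Imgproc.COLOR_RGBA2GRAY);
    Bitmap out = Bitmap.createBitmap(gray.cols(), gray.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(gray, out);                           // CV_8UC1 input is accepted as well
    rgba.release();
    gray.release();
    return out;
}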

@ -0,0 +1,332 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
import org.opencv.core.Rect;
// C++: class StereoBM
//javadoc: StereoBM
public class StereoBM extends StereoMatcher {
protected StereoBM(long addr) { super(addr); }
// internal usage only
public static StereoBM __fromPtr__(long addr) { return new StereoBM(addr); }
public static final int
PREFILTER_NORMALIZED_RESPONSE = 0,
PREFILTER_XSOBEL = 1;
//
// C++: static Ptr_StereoBM create(int numDisparities = 0, int blockSize = 21)
//
//javadoc: StereoBM::create(numDisparities, blockSize)
public static StereoBM create(int numDisparities, int blockSize)
{
StereoBM retVal = StereoBM.__fromPtr__(create_0(numDisparities, blockSize));
return retVal;
}
//javadoc: StereoBM::create()
public static StereoBM create()
{
StereoBM retVal = StereoBM.__fromPtr__(create_1());
return retVal;
}
//
// C++: Rect getROI1()
//
//javadoc: StereoBM::getROI1()
public Rect getROI1()
{
Rect retVal = new Rect(getROI1_0(nativeObj));
return retVal;
}
//
// C++: Rect getROI2()
//
//javadoc: StereoBM::getROI2()
public Rect getROI2()
{
Rect retVal = new Rect(getROI2_0(nativeObj));
return retVal;
}
//
// C++: int getPreFilterCap()
//
//javadoc: StereoBM::getPreFilterCap()
public int getPreFilterCap()
{
int retVal = getPreFilterCap_0(nativeObj);
return retVal;
}
//
// C++: int getPreFilterSize()
//
//javadoc: StereoBM::getPreFilterSize()
public int getPreFilterSize()
{
int retVal = getPreFilterSize_0(nativeObj);
return retVal;
}
//
// C++: int getPreFilterType()
//
//javadoc: StereoBM::getPreFilterType()
public int getPreFilterType()
{
int retVal = getPreFilterType_0(nativeObj);
return retVal;
}
//
// C++: int getSmallerBlockSize()
//
//javadoc: StereoBM::getSmallerBlockSize()
public int getSmallerBlockSize()
{
int retVal = getSmallerBlockSize_0(nativeObj);
return retVal;
}
//
// C++: int getTextureThreshold()
//
//javadoc: StereoBM::getTextureThreshold()
public int getTextureThreshold()
{
int retVal = getTextureThreshold_0(nativeObj);
return retVal;
}
//
// C++: int getUniquenessRatio()
//
//javadoc: StereoBM::getUniquenessRatio()
public int getUniquenessRatio()
{
int retVal = getUniquenessRatio_0(nativeObj);
return retVal;
}
//
// C++: void setPreFilterCap(int preFilterCap)
//
//javadoc: StereoBM::setPreFilterCap(preFilterCap)
public void setPreFilterCap(int preFilterCap)
{
setPreFilterCap_0(nativeObj, preFilterCap);
return;
}
//
// C++: void setPreFilterSize(int preFilterSize)
//
//javadoc: StereoBM::setPreFilterSize(preFilterSize)
public void setPreFilterSize(int preFilterSize)
{
setPreFilterSize_0(nativeObj, preFilterSize);
return;
}
//
// C++: void setPreFilterType(int preFilterType)
//
//javadoc: StereoBM::setPreFilterType(preFilterType)
public void setPreFilterType(int preFilterType)
{
setPreFilterType_0(nativeObj, preFilterType);
return;
}
//
// C++: void setROI1(Rect roi1)
//
//javadoc: StereoBM::setROI1(roi1)
public void setROI1(Rect roi1)
{
setROI1_0(nativeObj, roi1.x, roi1.y, roi1.width, roi1.height);
return;
}
//
// C++: void setROI2(Rect roi2)
//
//javadoc: StereoBM::setROI2(roi2)
public void setROI2(Rect roi2)
{
setROI2_0(nativeObj, roi2.x, roi2.y, roi2.width, roi2.height);
return;
}
//
// C++: void setSmallerBlockSize(int blockSize)
//
//javadoc: StereoBM::setSmallerBlockSize(blockSize)
public void setSmallerBlockSize(int blockSize)
{
setSmallerBlockSize_0(nativeObj, blockSize);
return;
}
//
// C++: void setTextureThreshold(int textureThreshold)
//
//javadoc: StereoBM::setTextureThreshold(textureThreshold)
public void setTextureThreshold(int textureThreshold)
{
setTextureThreshold_0(nativeObj, textureThreshold);
return;
}
//
// C++: void setUniquenessRatio(int uniquenessRatio)
//
//javadoc: StereoBM::setUniquenessRatio(uniquenessRatio)
public void setUniquenessRatio(int uniquenessRatio)
{
setUniquenessRatio_0(nativeObj, uniquenessRatio);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_StereoBM create(int numDisparities = 0, int blockSize = 21)
private static native long create_0(int numDisparities, int blockSize);
private static native long create_1();
// C++: Rect getROI1()
private static native double[] getROI1_0(long nativeObj);
// C++: Rect getROI2()
private static native double[] getROI2_0(long nativeObj);
// C++: int getPreFilterCap()
private static native int getPreFilterCap_0(long nativeObj);
// C++: int getPreFilterSize()
private static native int getPreFilterSize_0(long nativeObj);
// C++: int getPreFilterType()
private static native int getPreFilterType_0(long nativeObj);
// C++: int getSmallerBlockSize()
private static native int getSmallerBlockSize_0(long nativeObj);
// C++: int getTextureThreshold()
private static native int getTextureThreshold_0(long nativeObj);
// C++: int getUniquenessRatio()
private static native int getUniquenessRatio_0(long nativeObj);
// C++: void setPreFilterCap(int preFilterCap)
private static native void setPreFilterCap_0(long nativeObj, int preFilterCap);
// C++: void setPreFilterSize(int preFilterSize)
private static native void setPreFilterSize_0(long nativeObj, int preFilterSize);
// C++: void setPreFilterType(int preFilterType)
private static native void setPreFilterType_0(long nativeObj, int preFilterType);
// C++: void setROI1(Rect roi1)
private static native void setROI1_0(long nativeObj, int roi1_x, int roi1_y, int roi1_width, int roi1_height);
// C++: void setROI2(Rect roi2)
private static native void setROI2_0(long nativeObj, int roi2_x, int roi2_y, int roi2_width, int roi2_height);
// C++: void setSmallerBlockSize(int blockSize)
private static native void setSmallerBlockSize_0(long nativeObj, int blockSize);
// C++: void setTextureThreshold(int textureThreshold)
private static native void setTextureThreshold_0(long nativeObj, int textureThreshold);
// C++: void setUniquenessRatio(int uniquenessRatio)
private static native void setUniquenessRatio_0(long nativeObj, int uniquenessRatio);
// native support for java finalize()
private static native void delete(long nativeObj);
}
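Usage sketch for the generated wrapper (file paths are placeholders; Imgcodecs comes from the same SDK; numDisparities must be a multiple of 16):

Mat left  = Imgcodecs.imread("left.png",  Imgcodecs.IMREAD_GRAYSCALE);
Mat right = Imgcodecs.imread("right.png", Imgcodecs.IMREAD_GRAYSCALE);
StereoBM bm = StereoBM.create(64, 21);        // 64 disparity levels, 21x21 matching block
Mat disparity = new Mat();                    // comes back as CV_16S, scaled by StereoMatcher.DISP_SCALE
bm.compute(left, right, disparity);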

@ -0,0 +1,255 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
import org.opencv.core.Algorithm;
import org.opencv.core.Mat;
// C++: class StereoMatcher
//javadoc: StereoMatcher
public class StereoMatcher extends Algorithm {
protected StereoMatcher(long addr) { super(addr); }
// internal usage only
public static StereoMatcher __fromPtr__(long addr) { return new StereoMatcher(addr); }
public static final int
DISP_SHIFT = 4,
DISP_SCALE = (1 << DISP_SHIFT);
//
// C++: int getBlockSize()
//
//javadoc: StereoMatcher::getBlockSize()
public int getBlockSize()
{
int retVal = getBlockSize_0(nativeObj);
return retVal;
}
//
// C++: int getDisp12MaxDiff()
//
//javadoc: StereoMatcher::getDisp12MaxDiff()
public int getDisp12MaxDiff()
{
int retVal = getDisp12MaxDiff_0(nativeObj);
return retVal;
}
//
// C++: int getMinDisparity()
//
//javadoc: StereoMatcher::getMinDisparity()
public int getMinDisparity()
{
int retVal = getMinDisparity_0(nativeObj);
return retVal;
}
//
// C++: int getNumDisparities()
//
//javadoc: StereoMatcher::getNumDisparities()
public int getNumDisparities()
{
int retVal = getNumDisparities_0(nativeObj);
return retVal;
}
//
// C++: int getSpeckleRange()
//
//javadoc: StereoMatcher::getSpeckleRange()
public int getSpeckleRange()
{
int retVal = getSpeckleRange_0(nativeObj);
return retVal;
}
//
// C++: int getSpeckleWindowSize()
//
//javadoc: StereoMatcher::getSpeckleWindowSize()
public int getSpeckleWindowSize()
{
int retVal = getSpeckleWindowSize_0(nativeObj);
return retVal;
}
//
// C++: void compute(Mat left, Mat right, Mat& disparity)
//
//javadoc: StereoMatcher::compute(left, right, disparity)
public void compute(Mat left, Mat right, Mat disparity)
{
compute_0(nativeObj, left.nativeObj, right.nativeObj, disparity.nativeObj);
return;
}
//
// C++: void setBlockSize(int blockSize)
//
//javadoc: StereoMatcher::setBlockSize(blockSize)
public void setBlockSize(int blockSize)
{
setBlockSize_0(nativeObj, blockSize);
return;
}
//
// C++: void setDisp12MaxDiff(int disp12MaxDiff)
//
//javadoc: StereoMatcher::setDisp12MaxDiff(disp12MaxDiff)
public void setDisp12MaxDiff(int disp12MaxDiff)
{
setDisp12MaxDiff_0(nativeObj, disp12MaxDiff);
return;
}
//
// C++: void setMinDisparity(int minDisparity)
//
//javadoc: StereoMatcher::setMinDisparity(minDisparity)
public void setMinDisparity(int minDisparity)
{
setMinDisparity_0(nativeObj, minDisparity);
return;
}
//
// C++: void setNumDisparities(int numDisparities)
//
//javadoc: StereoMatcher::setNumDisparities(numDisparities)
public void setNumDisparities(int numDisparities)
{
setNumDisparities_0(nativeObj, numDisparities);
return;
}
//
// C++: void setSpeckleRange(int speckleRange)
//
//javadoc: StereoMatcher::setSpeckleRange(speckleRange)
public void setSpeckleRange(int speckleRange)
{
setSpeckleRange_0(nativeObj, speckleRange);
return;
}
//
// C++: void setSpeckleWindowSize(int speckleWindowSize)
//
//javadoc: StereoMatcher::setSpeckleWindowSize(speckleWindowSize)
public void setSpeckleWindowSize(int speckleWindowSize)
{
setSpeckleWindowSize_0(nativeObj, speckleWindowSize);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: int getBlockSize()
private static native int getBlockSize_0(long nativeObj);
// C++: int getDisp12MaxDiff()
private static native int getDisp12MaxDiff_0(long nativeObj);
// C++: int getMinDisparity()
private static native int getMinDisparity_0(long nativeObj);
// C++: int getNumDisparities()
private static native int getNumDisparities_0(long nativeObj);
// C++: int getSpeckleRange()
private static native int getSpeckleRange_0(long nativeObj);
// C++: int getSpeckleWindowSize()
private static native int getSpeckleWindowSize_0(long nativeObj);
// C++: void compute(Mat left, Mat right, Mat& disparity)
private static native void compute_0(long nativeObj, long left_nativeObj, long right_nativeObj, long disparity_nativeObj);
// C++: void setBlockSize(int blockSize)
private static native void setBlockSize_0(long nativeObj, int blockSize);
// C++: void setDisp12MaxDiff(int disp12MaxDiff)
private static native void setDisp12MaxDiff_0(long nativeObj, int disp12MaxDiff);
// C++: void setMinDisparity(int minDisparity)
private static native void setMinDisparity_0(long nativeObj, int minDisparity);
// C++: void setNumDisparities(int numDisparities)
private static native void setNumDisparities_0(long nativeObj, int numDisparities);
// C++: void setSpeckleRange(int speckleRange)
private static native void setSpeckleRange_0(long nativeObj, int speckleRange);
// C++: void setSpeckleWindowSize(int speckleWindowSize)
private static native void setSpeckleWindowSize_0(long nativeObj, int speckleWindowSize);
// native support for java finalize()
private static native void delete(long nativeObj);
}
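Note that compute() returns fixed-point disparities: DISP_SHIFT fractional bits, i.e. values scaled by DISP_SCALE. Continuing the StereoBM sketch above, a float map in pixel units could be obtained as:

Mat disparityFloat = new Mat();
disparity.convertTo(disparityFloat, CvType.CV_32F, 1.0 / StereoMatcher.DISP_SCALE);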

@ -0,0 +1,230 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
// C++: class StereoSGBM
//javadoc: StereoSGBM
public class StereoSGBM extends StereoMatcher {
protected StereoSGBM(long addr) { super(addr); }
// internal usage only
public static StereoSGBM __fromPtr__(long addr) { return new StereoSGBM(addr); }
public static final int
MODE_SGBM = 0,
MODE_HH = 1,
MODE_SGBM_3WAY = 2,
MODE_HH4 = 3;
//
// C++: static Ptr_StereoSGBM create(int minDisparity = 0, int numDisparities = 16, int blockSize = 3, int P1 = 0, int P2 = 0, int disp12MaxDiff = 0, int preFilterCap = 0, int uniquenessRatio = 0, int speckleWindowSize = 0, int speckleRange = 0, int mode = StereoSGBM::MODE_SGBM)
//
//javadoc: StereoSGBM::create(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange, mode)
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange, int mode)
{
StereoSGBM retVal = StereoSGBM.__fromPtr__(create_0(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange, mode));
return retVal;
}
//javadoc: StereoSGBM::create()
public static StereoSGBM create()
{
StereoSGBM retVal = StereoSGBM.__fromPtr__(create_1());
return retVal;
}
//
// C++: int getMode()
//
//javadoc: StereoSGBM::getMode()
public int getMode()
{
int retVal = getMode_0(nativeObj);
return retVal;
}
//
// C++: int getP1()
//
//javadoc: StereoSGBM::getP1()
public int getP1()
{
int retVal = getP1_0(nativeObj);
return retVal;
}
//
// C++: int getP2()
//
//javadoc: StereoSGBM::getP2()
public int getP2()
{
int retVal = getP2_0(nativeObj);
return retVal;
}
//
// C++: int getPreFilterCap()
//
//javadoc: StereoSGBM::getPreFilterCap()
public int getPreFilterCap()
{
int retVal = getPreFilterCap_0(nativeObj);
return retVal;
}
//
// C++: int getUniquenessRatio()
//
//javadoc: StereoSGBM::getUniquenessRatio()
public int getUniquenessRatio()
{
int retVal = getUniquenessRatio_0(nativeObj);
return retVal;
}
//
// C++: void setMode(int mode)
//
//javadoc: StereoSGBM::setMode(mode)
public void setMode(int mode)
{
setMode_0(nativeObj, mode);
return;
}
//
// C++: void setP1(int P1)
//
//javadoc: StereoSGBM::setP1(P1)
public void setP1(int P1)
{
setP1_0(nativeObj, P1);
return;
}
//
// C++: void setP2(int P2)
//
//javadoc: StereoSGBM::setP2(P2)
public void setP2(int P2)
{
setP2_0(nativeObj, P2);
return;
}
//
// C++: void setPreFilterCap(int preFilterCap)
//
//javadoc: StereoSGBM::setPreFilterCap(preFilterCap)
public void setPreFilterCap(int preFilterCap)
{
setPreFilterCap_0(nativeObj, preFilterCap);
return;
}
//
// C++: void setUniquenessRatio(int uniquenessRatio)
//
//javadoc: StereoSGBM::setUniquenessRatio(uniquenessRatio)
public void setUniquenessRatio(int uniquenessRatio)
{
setUniquenessRatio_0(nativeObj, uniquenessRatio);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_StereoSGBM create(int minDisparity = 0, int numDisparities = 16, int blockSize = 3, int P1 = 0, int P2 = 0, int disp12MaxDiff = 0, int preFilterCap = 0, int uniquenessRatio = 0, int speckleWindowSize = 0, int speckleRange = 0, int mode = StereoSGBM::MODE_SGBM)
private static native long create_0(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange, int mode);
private static native long create_1();
// C++: int getMode()
private static native int getMode_0(long nativeObj);
// C++: int getP1()
private static native int getP1_0(long nativeObj);
// C++: int getP2()
private static native int getP2_0(long nativeObj);
// C++: int getPreFilterCap()
private static native int getPreFilterCap_0(long nativeObj);
// C++: int getUniquenessRatio()
private static native int getUniquenessRatio_0(long nativeObj);
// C++: void setMode(int mode)
private static native void setMode_0(long nativeObj, int mode);
// C++: void setP1(int P1)
private static native void setP1_0(long nativeObj, int P1);
// C++: void setP2(int P2)
private static native void setP2_0(long nativeObj, int P2);
// C++: void setPreFilterCap(int preFilterCap)
private static native void setPreFilterCap_0(long nativeObj, int preFilterCap);
// C++: void setUniquenessRatio(int uniquenessRatio)
private static native void setUniquenessRatio_0(long nativeObj, int uniquenessRatio);
// native support for java finalize()
private static native void delete(long nativeObj);
}
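A sketch reusing the left/right Mats from the StereoBM example above; the P1/P2 values follow the commonly recommended 8*channels*blockSize^2 and 32*channels*blockSize^2 heuristic (single-channel input assumed, all numbers are tuning placeholders):

int blockSize = 5;
StereoSGBM sgbm = StereoSGBM.create(
        0,                              // minDisparity
        96,                             // numDisparities, a multiple of 16
        blockSize,
        8 * blockSize * blockSize,      // P1
        32 * blockSize * blockSize,     // P2
        1,                              // disp12MaxDiff
        63,                             // preFilterCap
        10,                             // uniquenessRatio
        100,                            // speckleWindowSize
        32,                             // speckleRange
        StereoSGBM.MODE_SGBM);
Mat sgbmDisparity = new Mat();
sgbm.compute(left, right, sgbmDisparity);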

@ -0,0 +1,111 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.core;
// C++: class Algorithm
//javadoc: Algorithm
public class Algorithm {
protected final long nativeObj;
protected Algorithm(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
// internal usage only
public static Algorithm __fromPtr__(long addr) { return new Algorithm(addr); }
//
// C++: String getDefaultName()
//
//javadoc: Algorithm::getDefaultName()
public String getDefaultName()
{
String retVal = getDefaultName_0(nativeObj);
return retVal;
}
//
// C++: bool empty()
//
//javadoc: Algorithm::empty()
public boolean empty()
{
boolean retVal = empty_0(nativeObj);
return retVal;
}
//
// C++: void clear()
//
//javadoc: Algorithm::clear()
public void clear()
{
clear_0(nativeObj);
return;
}
//
// C++: void read(FileNode fn)
//
// Unknown type 'FileNode' (I), skipping the function
//
// C++: void save(String filename)
//
//javadoc: Algorithm::save(filename)
public void save(String filename)
{
save_0(nativeObj, filename);
return;
}
//
// C++: void write(Ptr_FileStorage fs, String name = String())
//
// Unknown type 'Ptr_FileStorage' (I), skipping the function
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: String getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// C++: bool empty()
private static native boolean empty_0(long nativeObj);
// C++: void clear()
private static native void clear_0(long nativeObj);
// C++: void save(String filename)
private static native void save_0(long nativeObj, String filename);
// native support for java finalize()
private static native void delete(long nativeObj);
}

@ -0,0 +1,15 @@
package org.opencv.core;
public class CvException extends RuntimeException {
private static final long serialVersionUID = 1L;
public CvException(String msg) {
super(msg);
}
@Override
public String toString() {
return "CvException [" + super.toString() + "]";
}
}

@ -0,0 +1,136 @@
package org.opencv.core;
public final class CvType {
// type depth constants
public static final int
CV_8U = 0, CV_8S = 1,
CV_16U = 2, CV_16S = 3,
CV_32S = 4,
CV_32F = 5,
CV_64F = 6,
CV_USRTYPE1 = 7;
// predefined type constants
public static final int
CV_8UC1 = CV_8UC(1), CV_8UC2 = CV_8UC(2), CV_8UC3 = CV_8UC(3), CV_8UC4 = CV_8UC(4),
CV_8SC1 = CV_8SC(1), CV_8SC2 = CV_8SC(2), CV_8SC3 = CV_8SC(3), CV_8SC4 = CV_8SC(4),
CV_16UC1 = CV_16UC(1), CV_16UC2 = CV_16UC(2), CV_16UC3 = CV_16UC(3), CV_16UC4 = CV_16UC(4),
CV_16SC1 = CV_16SC(1), CV_16SC2 = CV_16SC(2), CV_16SC3 = CV_16SC(3), CV_16SC4 = CV_16SC(4),
CV_32SC1 = CV_32SC(1), CV_32SC2 = CV_32SC(2), CV_32SC3 = CV_32SC(3), CV_32SC4 = CV_32SC(4),
CV_32FC1 = CV_32FC(1), CV_32FC2 = CV_32FC(2), CV_32FC3 = CV_32FC(3), CV_32FC4 = CV_32FC(4),
CV_64FC1 = CV_64FC(1), CV_64FC2 = CV_64FC(2), CV_64FC3 = CV_64FC(3), CV_64FC4 = CV_64FC(4);
private static final int CV_CN_MAX = 512, CV_CN_SHIFT = 3, CV_DEPTH_MAX = (1 << CV_CN_SHIFT);
public static final int makeType(int depth, int channels) {
if (channels <= 0 || channels >= CV_CN_MAX) {
throw new java.lang.UnsupportedOperationException(
"Channels count should be 1.." + (CV_CN_MAX - 1));
}
if (depth < 0 || depth >= CV_DEPTH_MAX) {
throw new java.lang.UnsupportedOperationException(
"Data type depth should be 0.." + (CV_DEPTH_MAX - 1));
}
return (depth & (CV_DEPTH_MAX - 1)) + ((channels - 1) << CV_CN_SHIFT);
}
public static final int CV_8UC(int ch) {
return makeType(CV_8U, ch);
}
public static final int CV_8SC(int ch) {
return makeType(CV_8S, ch);
}
public static final int CV_16UC(int ch) {
return makeType(CV_16U, ch);
}
public static final int CV_16SC(int ch) {
return makeType(CV_16S, ch);
}
public static final int CV_32SC(int ch) {
return makeType(CV_32S, ch);
}
public static final int CV_32FC(int ch) {
return makeType(CV_32F, ch);
}
public static final int CV_64FC(int ch) {
return makeType(CV_64F, ch);
}
public static final int channels(int type) {
return (type >> CV_CN_SHIFT) + 1;
}
public static final int depth(int type) {
return type & (CV_DEPTH_MAX - 1);
}
public static final boolean isInteger(int type) {
return depth(type) < CV_32F;
}
public static final int ELEM_SIZE(int type) {
switch (depth(type)) {
case CV_8U:
case CV_8S:
return channels(type);
case CV_16U:
case CV_16S:
return 2 * channels(type);
case CV_32S:
case CV_32F:
return 4 * channels(type);
case CV_64F:
return 8 * channels(type);
default:
throw new java.lang.UnsupportedOperationException(
"Unsupported CvType value: " + type);
}
}
public static final String typeToString(int type) {
String s;
switch (depth(type)) {
case CV_8U:
s = "CV_8U";
break;
case CV_8S:
s = "CV_8S";
break;
case CV_16U:
s = "CV_16U";
break;
case CV_16S:
s = "CV_16S";
break;
case CV_32S:
s = "CV_32S";
break;
case CV_32F:
s = "CV_32F";
break;
case CV_64F:
s = "CV_64F";
break;
case CV_USRTYPE1:
s = "CV_USRTYPE1";
break;
default:
throw new java.lang.UnsupportedOperationException(
"Unsupported CvType value: " + type);
}
int ch = channels(type);
if (ch <= 4)
return s + "C" + ch;
else
return s + "C(" + ch + ")";
}
}
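A few sanity checks for the packed encoding above (depth in the low bits, channel count minus one shifted left by CV_CN_SHIFT); a sketch:

int t = CvType.CV_8UC3;                                   // same value as makeType(CV_8U, 3)
System.out.println(CvType.typeToString(t));               // prints "CV_8UC3"
System.out.println(CvType.channels(t));                   // 3
System.out.println(CvType.depth(t) == CvType.CV_8U);      // true
System.out.println(CvType.ELEM_SIZE(t));                  // 3 bytes per element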

@ -0,0 +1,58 @@
package org.opencv.core;
//C++: class DMatch
/**
* Structure for matching: query descriptor index, train descriptor index, train
* image index and distance between descriptors.
*/
public class DMatch {
/**
* Query descriptor index.
*/
public int queryIdx;
/**
* Train descriptor index.
*/
public int trainIdx;
/**
* Train image index.
*/
public int imgIdx;
// javadoc: DMatch::distance
public float distance;
// javadoc: DMatch::DMatch()
public DMatch() {
this(-1, -1, Float.MAX_VALUE);
}
// javadoc: DMatch::DMatch(_queryIdx, _trainIdx, _distance)
public DMatch(int _queryIdx, int _trainIdx, float _distance) {
queryIdx = _queryIdx;
trainIdx = _trainIdx;
imgIdx = -1;
distance = _distance;
}
// javadoc: DMatch::DMatch(_queryIdx, _trainIdx, _imgIdx, _distance)
public DMatch(int _queryIdx, int _trainIdx, int _imgIdx, float _distance) {
queryIdx = _queryIdx;
trainIdx = _trainIdx;
imgIdx = _imgIdx;
distance = _distance;
}
public boolean lessThan(DMatch it) {
return distance < it.distance;
}
@Override
public String toString() {
return "DMatch [queryIdx=" + queryIdx + ", trainIdx=" + trainIdx
+ ", imgIdx=" + imgIdx + ", distance=" + distance + "]";
}
}

@ -0,0 +1,81 @@
package org.opencv.core;
//javadoc: KeyPoint
public class KeyPoint {
/**
* Coordinates of the keypoint.
*/
public Point pt;
/**
* Diameter of the meaningful keypoint neighborhood.
*/
public float size;
/**
* Computed orientation of the keypoint (-1 if not applicable).
*/
public float angle;
/**
* The response by which the strongest keypoints have been selected. Can
* be used for further sorting or subsampling.
*/
public float response;
/**
* Octave (pyramid layer) from which the keypoint has been extracted.
*/
public int octave;
/**
* Object ID that can be used to cluster keypoints by the object they
* belong to.
*/
public int class_id;
// javadoc:KeyPoint::KeyPoint(x,y,_size,_angle,_response,_octave,_class_id)
public KeyPoint(float x, float y, float _size, float _angle, float _response, int _octave, int _class_id)
{
pt = new Point(x, y);
size = _size;
angle = _angle;
response = _response;
octave = _octave;
class_id = _class_id;
}
// javadoc: KeyPoint::KeyPoint()
public KeyPoint()
{
this(0, 0, 0, -1, 0, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle, _response, _octave)
public KeyPoint(float x, float y, float _size, float _angle, float _response, int _octave)
{
this(x, y, _size, _angle, _response, _octave, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle, _response)
public KeyPoint(float x, float y, float _size, float _angle, float _response)
{
this(x, y, _size, _angle, _response, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle)
public KeyPoint(float x, float y, float _size, float _angle)
{
this(x, y, _size, _angle, 0, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size)
public KeyPoint(float x, float y, float _size)
{
this(x, y, _size, -1, 0, 0, -1);
}
@Override
public String toString() {
return "KeyPoint [pt=" + pt + ", size=" + size + ", angle=" + angle
+ ", response=" + response + ", octave=" + octave
+ ", class_id=" + class_id + "]";
}
}

@ -0,0 +1,98 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfByte extends Mat {
// 8UC(x)
private static final int _depth = CvType.CV_8U;
private static final int _channels = 1;
public MatOfByte() {
super();
}
protected MatOfByte(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfByte fromNativeAddr(long addr) {
return new MatOfByte(addr);
}
public MatOfByte(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfByte(byte...a) {
super();
fromArray(a);
}
public MatOfByte(int offset, int length, byte...a) {
super();
fromArray(offset, length, a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(byte...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public void fromArray(int offset, int length, byte...a) {
if (offset < 0)
throw new IllegalArgumentException("offset < 0");
if (a == null)
throw new NullPointerException();
if (length < 0 || length + offset > a.length)
throw new IllegalArgumentException("invalid 'length' parameter: " + Integer.toString(length));
if (a.length == 0)
return;
int num = length / _channels;
alloc(num);
put(0, 0, a, offset, length); //TODO: check ret val!
}
public byte[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
byte[] a = new byte[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Byte> lb) {
if(lb==null || lb.size()==0)
return;
Byte ab[] = lb.toArray(new Byte[0]);
byte a[] = new byte[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Byte> toList() {
byte[] a = toArray();
Byte ab[] = new Byte[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}
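MatOfByte is the usual bridge between Mats and encoded image bytes; a sketch (frame is assumed to be an existing CV_8UC3/CV_8UC4 Mat, and Imgcodecs comes from the same SDK):

MatOfByte buf = new MatOfByte();
Imgcodecs.imencode(".jpg", frame, buf);                          // JPEG-encode into the byte matrix
byte[] jpegBytes = buf.toArray();                                // e.g. for a file or network stream
Mat decoded = Imgcodecs.imdecode(buf, Imgcodecs.IMREAD_COLOR);   // and back again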

@ -0,0 +1,81 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfDMatch extends Mat {
// 32FC4
private static final int _depth = CvType.CV_32F;
private static final int _channels = 4;
public MatOfDMatch() {
super();
}
protected MatOfDMatch(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat: " + toString());
//FIXME: do we need release() here?
}
public static MatOfDMatch fromNativeAddr(long addr) {
return new MatOfDMatch(addr);
}
public MatOfDMatch(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat: " + toString());
//FIXME: do we need release() here?
}
public MatOfDMatch(DMatch...ap) {
super();
fromArray(ap);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(DMatch...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
float buff[] = new float[num * _channels];
for(int i=0; i<num; i++) {
DMatch m = a[i];
buff[_channels*i+0] = m.queryIdx;
buff[_channels*i+1] = m.trainIdx;
buff[_channels*i+2] = m.imgIdx;
buff[_channels*i+3] = m.distance;
}
put(0, 0, buff); //TODO: check ret val!
}
public DMatch[] toArray() {
int num = (int) total();
DMatch[] a = new DMatch[num];
if(num == 0)
return a;
float buff[] = new float[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
a[i] = new DMatch((int) buff[_channels*i+0], (int) buff[_channels*i+1], (int) buff[_channels*i+2], buff[_channels*i+3]);
return a;
}
public void fromList(List<DMatch> ldm) {
DMatch adm[] = ldm.toArray(new DMatch[0]);
fromArray(adm);
}
public List<DMatch> toList() {
DMatch[] adm = toArray();
return Arrays.asList(adm);
}
}

@ -0,0 +1,79 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfDouble extends Mat {
// 64FC(x)
private static final int _depth = CvType.CV_64F;
private static final int _channels = 1;
public MatOfDouble() {
super();
}
protected MatOfDouble(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfDouble fromNativeAddr(long addr) {
return new MatOfDouble(addr);
}
public MatOfDouble(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfDouble(double...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(double...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public double[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
double[] a = new double[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Double> lb) {
if(lb==null || lb.size()==0)
return;
Double ab[] = lb.toArray(new Double[0]);
double a[] = new double[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Double> toList() {
double[] a = toArray();
Double ab[] = new Double[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@ -0,0 +1,79 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfFloat extends Mat {
// 32FC1
private static final int _depth = CvType.CV_32F;
private static final int _channels = 1;
public MatOfFloat() {
super();
}
protected MatOfFloat(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfFloat fromNativeAddr(long addr) {
return new MatOfFloat(addr);
}
public MatOfFloat(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfFloat(float...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(float...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public float[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
float[] a = new float[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Float> lb) {
if(lb==null || lb.size()==0)
return;
Float ab[] = lb.toArray(new Float[0]);
float a[] = new float[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Float> toList() {
float[] a = toArray();
Float ab[] = new Float[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@ -0,0 +1,79 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfFloat4 extends Mat {
// 32FC4
private static final int _depth = CvType.CV_32F;
private static final int _channels = 4;
public MatOfFloat4() {
super();
}
protected MatOfFloat4(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfFloat4 fromNativeAddr(long addr) {
return new MatOfFloat4(addr);
}
public MatOfFloat4(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfFloat4(float...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(float...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public float[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
float[] a = new float[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Float> lb) {
if(lb==null || lb.size()==0)
return;
Float ab[] = lb.toArray(new Float[0]);
float a[] = new float[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Float> toList() {
float[] a = toArray();
Float ab[] = new Float[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@ -0,0 +1,79 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfFloat6 extends Mat {
// 32FC6
private static final int _depth = CvType.CV_32F;
private static final int _channels = 6;
public MatOfFloat6() {
super();
}
protected MatOfFloat6(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfFloat6 fromNativeAddr(long addr) {
return new MatOfFloat6(addr);
}
public MatOfFloat6(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfFloat6(float...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(float...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public float[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
float[] a = new float[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Float> lb) {
if(lb==null || lb.size()==0)
return;
Float ab[] = lb.toArray(new Float[0]);
float a[] = new float[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Float> toList() {
float[] a = toArray();
Float ab[] = new Float[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@ -0,0 +1,80 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfInt extends Mat {
// 32SC1
private static final int _depth = CvType.CV_32S;
private static final int _channels = 1;
public MatOfInt() {
super();
}
protected MatOfInt(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfInt fromNativeAddr(long addr) {
return new MatOfInt(addr);
}
public MatOfInt(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfInt(int...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(int...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public int[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
int[] a = new int[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Integer> lb) {
if(lb==null || lb.size()==0)
return;
Integer ab[] = lb.toArray(new Integer[0]);
int a[] = new int[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Integer> toList() {
int[] a = toArray();
Integer ab[] = new Integer[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@ -0,0 +1,80 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfInt4 extends Mat {
// 32SC4
private static final int _depth = CvType.CV_32S;
private static final int _channels = 4;
public MatOfInt4() {
super();
}
protected MatOfInt4(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfInt4 fromNativeAddr(long addr) {
return new MatOfInt4(addr);
}
public MatOfInt4(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfInt4(int...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(int...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public int[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
int[] a = new int[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Integer> lb) {
if(lb==null || lb.size()==0)
return;
Integer ab[] = lb.toArray(new Integer[0]);
int a[] = new int[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Integer> toList() {
int[] a = toArray();
Integer ab[] = new Integer[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@ -0,0 +1,84 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfKeyPoint extends Mat {
// 32FC7
private static final int _depth = CvType.CV_32F;
private static final int _channels = 7;
public MatOfKeyPoint() {
super();
}
protected MatOfKeyPoint(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfKeyPoint fromNativeAddr(long addr) {
return new MatOfKeyPoint(addr);
}
public MatOfKeyPoint(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfKeyPoint(KeyPoint...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(KeyPoint...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
float buff[] = new float[num * _channels];
for(int i=0; i<num; i++) {
KeyPoint kp = a[i];
buff[_channels*i+0] = (float) kp.pt.x;
buff[_channels*i+1] = (float) kp.pt.y;
buff[_channels*i+2] = kp.size;
buff[_channels*i+3] = kp.angle;
buff[_channels*i+4] = kp.response;
buff[_channels*i+5] = kp.octave;
buff[_channels*i+6] = kp.class_id;
}
put(0, 0, buff); //TODO: check ret val!
}
public KeyPoint[] toArray() {
int num = (int) total();
KeyPoint[] a = new KeyPoint[num];
if(num == 0)
return a;
float buff[] = new float[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
a[i] = new KeyPoint( buff[_channels*i+0], buff[_channels*i+1], buff[_channels*i+2], buff[_channels*i+3],
buff[_channels*i+4], (int) buff[_channels*i+5], (int) buff[_channels*i+6] );
return a;
}
public void fromList(List<KeyPoint> lkp) {
KeyPoint akp[] = lkp.toArray(new KeyPoint[0]);
fromArray(akp);
}
public List<KeyPoint> toList() {
KeyPoint[] akp = toArray();
return Arrays.asList(akp);
}
}

@ -0,0 +1,78 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfPoint extends Mat {
// 32SC2
private static final int _depth = CvType.CV_32S;
private static final int _channels = 2;
public MatOfPoint() {
super();
}
protected MatOfPoint(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfPoint fromNativeAddr(long addr) {
return new MatOfPoint(addr);
}
public MatOfPoint(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfPoint(Point...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(Point...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
int buff[] = new int[num * _channels];
for(int i=0; i<num; i++) {
Point p = a[i];
buff[_channels*i+0] = (int) p.x;
buff[_channels*i+1] = (int) p.y;
}
put(0, 0, buff); //TODO: check ret val!
}
public Point[] toArray() {
int num = (int) total();
Point[] ap = new Point[num];
if(num == 0)
return ap;
int buff[] = new int[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
ap[i] = new Point(buff[i*_channels], buff[i*_channels+1]);
return ap;
}
public void fromList(List<Point> lp) {
Point ap[] = lp.toArray(new Point[0]);
fromArray(ap);
}
public List<Point> toList() {
Point[] ap = toArray();
return Arrays.asList(ap);
}
}
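findContours is a typical producer of MatOfPoint instances; a sketch (gray is assumed to be a binary CV_8UC1 image, java.util.List/ArrayList imported):

List<MatOfPoint> contours = new ArrayList<>();
Mat hierarchy = new Mat();
Imgproc.findContours(gray, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
for (MatOfPoint contour : contours) {
    double area = Imgproc.contourArea(contour);   // MatOfPoint extends Mat, so it can be passed directly
}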
