I have driven Erel to distraction looking for a means to automatically turn the torch on when scanning barcodes in poor light conditions, see:
https://www.b4x.com/android/forum/threads/equivalent-of-b4a-phone-phonesensors-type_light.62181/
One of my many shortcomings is an abject lack of any Objective C expertise (one of the reasons I'm using B4A/B4I) - however that does not mean I'm not up for a challenge in it.
I have managed to dig up some Objective C code that looks like it might solve the problem:
https://gist.github.com/JoppeSchwartz/72f3efd4ac0b5456dd8a#file-measurebrightness-m-L1
I have spent the last 2 days toying with this and seem to have managed to get it working inside a minimal B4I life support system:
When I say "seem to have managed to get it working" what I really mean is that it doesn't fall over, makes an appropriate camera shutter sound when you use it and gives some sort of reading.
What I am not so confident about is the actual readings - they seem to generally reflect the light conditions but can produce identical readings in slightly different light conditions and in some cases can be negative.
I would really appreciate anyone with a good Objective C background to have a look at this - if they also understand AVFoundation and AVCapture... stuff that would probably be useful.
I have appended the code as a zip below.
Thanks...
https://www.b4x.com/android/forum/threads/equivalent-of-b4a-phone-phonesensors-type_light.62181/
One of my many shortcomings is an abject lack of any Objective C expertise (one of the reasons I'm using B4A/B4I) - however that does not mean I'm not up for a challenge in it.
I have managed to dig up some Objective C code that looks like it might solve the problem:
https://gist.github.com/JoppeSchwartz/72f3efd4ac0b5456dd8a#file-measurebrightness-m-L1
I have spent the last 2 days toying with this and seem to have managed to get it working inside a minimal B4I life support system:
B4X:
#Region Project Attributes
#ApplicationLabel: Brightness
#Version: 1.0.0
'Orientation possible values: Portrait, LandscapeLeft, LandscapeRight and PortraitUpsideDown
#iPhoneOrientations: Portrait, LandscapeLeft, LandscapeRight
#iPadOrientations: Portrait, LandscapeLeft, LandscapeRight, PortraitUpsideDown
#End Region
Sub Process_Globals
Public App As Application
Public NavControl As NavigationController
Private Page1 As Page
Private Label1 As Label
End Sub
Private Sub Application_Start (Nav As NavigationController)
NavControl = Nav
Page1.Initialize("Page1")
NavControl.ShowPage(Page1)
End Sub
Private Sub Page1_Resize(Width As Int, Height As Int)
Page1.Title="Brightness"
Page1.RootPanel.RemoveAllViews
Page1.RootPanel.Color=Colors.White
Label1.Initialize("")
Page1.RootPanel.AddView(Label1,Width/2,Height/2,10,10)
Label1.Text="Click screen to get brightness"
Label1.SizeToFit
Label1.Left=Page1.RootPanel.Width/2-Label1.Width/2
End Sub
Private Sub Page1_Click
Dim no As NativeObject = Me
Dim bright As Object = no.RunMethod("sampleBrightness", Array ())
Label1.Text=bright
Label1.SizeToFit
Label1.Left=Page1.RootPanel.Width/2-Label1.Width/2
End Sub
#If OBJC
//Following code is based on:
//https://gist.github.com/JoppeSchwartz/72f3efd4ac0b5456dd8a#file-measurebrightness-m-L1
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
-(NSString *)sampleBrightness
{
//Initialize variables
AVCaptureSession *_videoSession = nil;
AVCaptureDevice *_cameraDevice = nil;
AVCaptureStillImageOutput *_imageOutput = nil;
AVCaptureConnection *_avConnection = nil;
NSError *err = nil;
//Set up video capture session
_videoSession = [[AVCaptureSession alloc] init];
if ([_videoSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
_videoSession.sessionPreset = AVCaptureSessionPresetLow;
} else {
return [NSString stringWithFormat:@"Error - could not set AVCaptureSession preset."];
}
//Find front camera device
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionFront) {
_cameraDevice = device;
}
}
//Add front camera device to session
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_cameraDevice error:&err];
if (err) {
return [NSString stringWithFormat:@"Error instantiating capture input device: %@", err.description];
}
if ([_videoSession canAddInput:input]) {
[_videoSession addInput:input];
}
//Create and add still image output device to the session
_imageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([_videoSession canAddOutput:_imageOutput]){
[_videoSession addOutput:_imageOutput];
}
//Find AV connection
_avConnection = nil;
for (AVCaptureConnection *connection in _imageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
_avConnection = connection;
break;
}
if (_avConnection) break;
}
}
if (!_avConnection) {
return [NSString stringWithFormat:@"Error - could not find AVCaptureConnection for capture session."];
}
[_videoSession startRunning];
//Extract a brightness reading
if (_imageOutput) {
__block NSString *myResult = nil;
[_imageOutput captureStillImageAsynchronouslyFromConnection:_avConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (error) {
myResult = [NSString stringWithFormat:@"Error taking picture: %@", error.description];
return;
}
CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments) {
// Do something with the attachments.
bool containsBrightnessKey = CFDictionaryContainsKey(exifAttachments, kCGImagePropertyExifBrightnessValue);
if (!containsBrightnessKey) {
myResult = [NSString stringWithFormat:@"Error - EXIF dictionary doesn't contain brightness key"];
return;
}
CFStringRef brightness = CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifBrightnessValue);
myResult = (__bridge NSString*)brightness;
return;
} else {
myResult = [NSString stringWithFormat:@"Error - no exifAttachments."];
return;
}
}];
[_videoSession stopRunning];
return myResult;
}
[_videoSession stopRunning];
return [NSString stringWithFormat:@"Error - no image output."];
}
#End If
When I say "seem to have managed to get it working" what I really mean is that it doesn't fall over, makes an appropriate camera shutter sound when you use it and gives some sort of reading.
What I am not so confident about is the actual readings - they seem to generally reflect the light conditions but can produce identical readings in slightly different light conditions and in some cases can be negative.
I would really appreciate anyone with a good Objective C background to have a look at this - if they also understand AVFoundation and AVCapture... stuff that would probably be useful.
I have appended the code as a zip below.
Thanks...