@@ -18,6 +18,8 @@
#define AUDIO_QUEUE_BUFFERS_COUNT 16
#define MAX_PACKET_DESCRIPTIONS 512
+// -----------------------------------------------------------------------------
+
typedef NS_ENUM(NSUInteger, ZANStreamPlaybackState)
{
ZANStreamPlaybackStateStopped = 0,
@@ -25,6 +27,8 @@ typedef NS_ENUM(NSUInteger, ZANStreamPlaybackState)
ZANStreamPlaybackStatePlaying,
};
+// -----------------------------------------------------------------------------
+
static void _ZANPropertyListenerCallback(void *clientData,
AudioFileStreamID fileStream,
AudioFileStreamPropertyID propertyID,
@@ -52,6 +56,16 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
UInt32 *outFramesCount,
AudioBufferList *data);
+// -----------------------------------------------------------------------------
+
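+// NSURLSession retains its delegate strongly until the session is invalidated,
+// so the player forwards session callbacks through this weak proxy rather than
+// acting as the delegate directly and retaining itself for the session's lifetime.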
+@interface ZANURLSessionDelegateWeakForwarder : NSObject <NSURLSessionDataDelegate>
+
+@property (nonatomic, weak) id<NSURLSessionDataDelegate> forwardedDelegate;
+
+@end
+
+// -----------------------------------------------------------------------------
+
@interface ZANStreamPlayer () <NSURLSessionDataDelegate>
@property (nonatomic, assign) ZANStreamPlaybackState playbackState;
@@ -62,6 +76,11 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
{
NSURLSession *_urlSession;
NSURLSessionDataTask *_dataTask;
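+ // ICY (SHOUTcast/Icecast) metadata parsing state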
+ NSUInteger _metaInterval;
+ NSUInteger _bytesReadSinceMeta;
+ NSMutableData *_currentMetaPayload;
+ NSUInteger _currentMetaLength;
+
AudioFileStreamID _audioFileStream;
AudioQueueRef _audioQueue;
AudioQueueBufferRef _audioQueueBuffers[AUDIO_QUEUE_BUFFERS_COUNT];
@@ -90,6 +109,8 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
_url = [url copy];
_options = options;
+ _currentMetaPayload = [[NSMutableData alloc] init];
+
_inputQueue = [[NSOperationQueue alloc] init];
_inputQueue.maxConcurrentOperationCount = 1;
@@ -97,7 +118,9 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
pthread_cond_init(&_queueBufferReadyCondition, NULL);
NSURLSessionConfiguration *sessionConfig = [NSURLSessionConfiguration defaultSessionConfiguration];
- _urlSession = [NSURLSession sessionWithConfiguration:sessionConfig delegate:self delegateQueue:_inputQueue];
+ ZANURLSessionDelegateWeakForwarder *urlSessionDelegate = [[ZANURLSessionDelegateWeakForwarder alloc] init];
+ urlSessionDelegate.forwardedDelegate = self;
+ _urlSession = [NSURLSession sessionWithConfiguration:sessionConfig delegate:urlSessionDelegate delegateQueue:_inputQueue];
}
return self;
}
@@ -229,8 +252,64 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
break;
}
+ // read the meta interval out of the response, if necessary
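+ // "icy-metaint" is the number of audio bytes the server sends between
+ // interleaved metadata blocks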
+ if ((_options & ZANStreamPlayerOptionRequestMetadata) && _metaInterval == 0) {
+ NSHTTPURLResponse *response = (NSHTTPURLResponse *)dataTask.response;
+ NSString *metaIntervalString = [[response allHeaderFields] objectForKey:@"icy-metaint"];
+ _metaInterval = (NSUInteger)[metaIntervalString integerValue];
+ }
+
+ // if we are getting metadata with the stream, we need to parse that out
+ NSData *audioData = nil;
+ if (_metaInterval > 0) {
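+ // the stream alternates <metaint audio bytes><length byte><length * 16 metadata bytes>,
+ // so walk the received bytes and route each run to the right destination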
+ NSMutableData *mutableAudioData = [[NSMutableData alloc] initWithCapacity:data.length];
+ [data enumerateByteRangesUsingBlock:^(const void *bytes, NSRange byteRange, BOOL *stop) {
+ NSUInteger bytesRead = 0;
+
+ while (bytesRead < byteRange.length) {
+ const NSUInteger remainingBytes = byteRange.length - bytesRead;
+ const void *currentPtr = bytes + bytesRead;
+
+ if (_currentMetaLength > 0) { // currently reading metadata
+ const NSUInteger remainingMetaBytes = _currentMetaLength - [_currentMetaPayload length];
+ const NSUInteger bytesToAppend = MIN(remainingMetaBytes, remainingBytes);
+ [_currentMetaPayload appendBytes:currentPtr length:bytesToAppend];
+
+ if (_currentMetaPayload.length == _currentMetaLength) {
+ [self _processMetadataUpdate:_currentMetaPayload];
+
+ _currentMetaPayload.length = 0;
+ _currentMetaLength = 0;
+ _bytesReadSinceMeta = 0;
+ }
+
+ bytesRead += bytesToAppend;
+ } else if (_bytesReadSinceMeta == _metaInterval) { // currently reading the metadata length byte
+ // the length byte counts 16-byte units, so a metadata block can be up to
+ // 255 * 16 = 4080 bytes; widen before multiplying so the product isn't
+ // truncated back into a uint8_t
+ const NSUInteger metaLength = (NSUInteger)(*(const uint8_t *)currentPtr) * 16;
+ if (metaLength > 0) {
+ _currentMetaLength = metaLength;
+ } else {
+ _bytesReadSinceMeta = 0;
+ }
+
+ bytesRead += 1;
+ } else { // currently reading audio data
+ const NSUInteger audioBytesToRead = MIN(_metaInterval - _bytesReadSinceMeta, remainingBytes);
+ [mutableAudioData appendBytes:currentPtr length:audioBytesToRead];
+
+ _bytesReadSinceMeta += audioBytesToRead;
+ bytesRead += audioBytesToRead;
+ }
+ }
+ }];
+
+ audioData = mutableAudioData;
+ } else {
+ audioData = data;
+ }
+
// write the data to the audio stream
- OSStatus status = AudioFileStreamParseBytes(_audioFileStream, (UInt32)data.length, data.bytes, 0);
+ OSStatus status = AudioFileStreamParseBytes(_audioFileStream, (UInt32)audioData.length, audioData.bytes, 0);
if (status != noErr) {
[self _logError:@"Failed to write data to audio stream (OSStatus = %d)", status];
error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
@@ -244,6 +323,195 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
}
}
+#pragma mark - Callbacks
+
+- (void)_handlePropertyChangeForFileStream:(AudioFileStreamID)stream
+ withPropertyID:(AudioFileStreamPropertyID)propertyID
+ flags:(UInt32 *)flags
+{
+ OSStatus status = noErr;
+
+ switch (propertyID) {
+ case kAudioFileStreamProperty_DataOffset: {
+ SInt64 offset = 0;
+ UInt32 propertySize = sizeof(SInt64);
+ status = AudioFileStreamGetProperty(stream, kAudioFileStreamProperty_DataOffset, &propertySize, &offset);
+ if (status == noErr) {
+ _dataOffset = (NSUInteger)offset;
+ }
+ } break;
+
+ case kAudioFileStreamProperty_AudioDataByteCount: {
+ UInt64 byteCount = 0;
+ UInt32 propertySize = sizeof(UInt64);
+ status = AudioFileStreamGetProperty(stream, kAudioFileStreamProperty_AudioDataByteCount, &propertySize, &byteCount);
+ if (status == noErr) {
+ _audioFileLength = (NSUInteger)byteCount;
+ }
+ } break;
+
+ case kAudioFileStreamProperty_DataFormat: {
+ AudioStreamBasicDescription format = {0};
+ UInt32 propertySize = sizeof(AudioStreamBasicDescription);
+ status = AudioFileStreamGetProperty(stream, kAudioFileStreamProperty_DataFormat, &propertySize, &format);
+ if (status == noErr) {
+ _audioStreamDescription = format;
+ }
+ } break;
+
+ case kAudioFileStreamProperty_FormatList: {
+ AudioFormatListItem *formatList = NULL;
+
+ do {
+ // get the size of the format list
+ UInt32 formatListSize = 0;
+ status = AudioFileStreamGetPropertyInfo(stream, kAudioFileStreamProperty_FormatList, &formatListSize, NULL);
+ if (status != noErr) {
+ [self _logError:@"Failed to get format list size (OSStatus = %d)", status];
+ break;
+ }
+
+ // get the new list of formats
+ formatList = (AudioFormatListItem *)malloc(formatListSize);
+ status = AudioFileStreamGetProperty(stream, kAudioFileStreamProperty_FormatList, &formatListSize, formatList);
+ if (status != noErr) {
+ [self _logError:@"Failed to get format list data (OSStatus = %d)", status];
+ break;
+ }
+
+ // find the AAC format that we're interested in parsing
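+ // (an HE-AAC stream advertises both its base AAC layer and the HE variant;
+ // prefer the HE entry so playback uses the full format)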
+ unsigned formatListCount = formatListSize / sizeof(AudioFormatListItem);
+ for (unsigned i = 0; i < formatListCount; ++i) {
+ AudioFormatListItem formatItem = formatList[i];
+ AudioStreamBasicDescription format = formatItem.mASBD;
+ if (format.mFormatID == kAudioFormatMPEG4AAC_HE || format.mFormatID == kAudioFormatMPEG4AAC_HE_V2) {
+ _audioStreamDescription = format;
+ break;
+ }
+ }
+ } while (0);
+
+ if (formatList) {
+ free(formatList);
+ }
+ } break;
+ }
+
+ if (status != noErr) {
+ [self _handleError:[NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil]];
+ }
+}
+
+- (void)_handleAudioPacketsAvailableWithData:(NSData *)audioData
+ packetsCount:(NSUInteger)packetsCount
+ packetDescriptions:(AudioStreamPacketDescription *)packetDescriptions
+{
+ NSError *error = nil;
+
+ do {
+ // make sure we have an audio queue initialized first
+ if (!_audioQueue) {
+ [self _createAudioOutputQueueWithError:&error];
+ }
+ if (error) {
+ [self _logError:@"Failed to create output queue. %@", error];
+ break;
+ }
+
+ // parse packets
+ for (unsigned i = 0; i < packetsCount; ++i) {
+ SInt64 packetOffset = packetDescriptions[i].mStartOffset;
+ SInt64 packetSize = packetDescriptions[i].mDataByteSize;
+ size_t spaceRemaining = _packetBufferSize - _bytesFilled;
+
+ if (spaceRemaining < packetSize) {
+ [self _enqueueAudioBuffer];
+ }
+
+ // wait until the current buffer is available
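+ // (buffers are marked free again in _handleBufferCompleteFromQueue:buffer:)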
+ pthread_mutex_lock(&_queueBuffersMutex);
+ while (_queueBuffersUsageStates[_currentBufferIndex]) {
+ pthread_cond_wait(&_queueBufferReadyCondition, &_queueBuffersMutex);
+ }
+ pthread_mutex_unlock(&_queueBuffersMutex);
+
+ // copy audio data into buffer
+ AudioQueueBufferRef buffer = _audioQueueBuffers[_currentBufferIndex];
+ memcpy(buffer->mAudioData + _bytesFilled, audioData.bytes + packetOffset, packetSize);
+
+ // store packet description
+ AudioStreamPacketDescription packetDescription = packetDescriptions[i];
+ packetDescription.mStartOffset = _bytesFilled;
+ _packetDescriptions[_packetsFilled] = packetDescription;
+
+ _bytesFilled += packetSize;
+ _packetsFilled += 1;
+
+ NSUInteger packetsRemaining = MAX_PACKET_DESCRIPTIONS - _packetsFilled;
+ if (packetsRemaining == 0) {
+ [self _enqueueAudioBuffer];
+ }
+ }
+ } while (0);
+
+ if (error) {
+ [self _logError:@"Error encountered while handling audio packets. %@", error];
+ [self _handleError:error];
+ }
+}
+
+- (void)_handleAudioQueue:(AudioQueueRef)queue propertyDidChange:(AudioQueuePropertyID)property
+{
+ NSAssert(queue == _audioQueue, @"Incorrect audio queue input for property change");
+
+ if (property == kAudioQueueProperty_IsRunning) {
+ UInt32 isRunning = 0;
+ UInt32 propertySize = sizeof(UInt32);
+ AudioQueueGetProperty(_audioQueue, kAudioQueueProperty_IsRunning, &isRunning, &propertySize);
+
+ [self _logMessage:@"Received IsRunning property state change for queue %p. New value = %u", queue, isRunning];
+
+ if (self.playing && !isRunning) {
+ self.playbackState = ZANStreamPlaybackStateStopped;
+ }
+ }
+}
+
+- (void)_handleBufferCompleteFromQueue:(AudioQueueRef)queue buffer:(AudioQueueBufferRef)buffer
+{
+ NSInteger bufferIdx = NSNotFound;
+ for (unsigned i = 0; i < AUDIO_QUEUE_BUFFERS_COUNT; ++i) {
+ if (buffer == _audioQueueBuffers[i]) {
+ bufferIdx = i;
+ break;
+ }
+ }
+
+ NSAssert(bufferIdx != NSNotFound, @"An unknown audio buffer was completed");
+
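+ // mark the buffer free and wake any thread blocked in _handleAudioPacketsAvailableWithData: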
+ pthread_mutex_lock(&_queueBuffersMutex);
+ _queueBuffersUsageStates[bufferIdx] = NO;
+ pthread_cond_signal(&_queueBufferReadyCondition);
+ pthread_mutex_unlock(&_queueBuffersMutex);
+}
+
+- (void)_handleTapCallbackFromTap:(AudioQueueProcessingTapRef)tap
+ withFramesCount:(UInt32)inNumberFrames
+ audioTimestamp:(AudioTimeStamp *)ioTimeStamp
+ processingTapFlags:(AudioQueueProcessingTapFlags *)flags
+ outFramesCount:(UInt32 *)outNumberFrames
+ data:(AudioBufferList *)ioData
+{
+ OSStatus status = AudioQueueProcessingTapGetSourceAudio(tap, inNumberFrames, ioTimeStamp, flags, outNumberFrames, ioData);
+ if (status != noErr) {
+ [self _logError:@"Failed to get source audio in processing tap callback (OSStatus = %d)", status];
+ } else {
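+ // assumes an interleaved stream format, where mBuffers[0] holds all channel data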
+ AudioBuffer *buffer = &ioData->mBuffers[0];
+ NSData *audioData = [NSData dataWithBytesNoCopy:buffer->mData length:buffer->mDataByteSize freeWhenDone:NO];
+ [_delegate streamPlayer:self didDecodeAudioData:audioData withFramesCount:inNumberFrames format:&_processingFormat];
+ }
+}
+
#pragma mark - Internal
- (BOOL)_openReadStream
@@ -254,7 +522,11 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
[_dataTask cancel];
}
- NSURLRequest *request = [[NSURLRequest alloc] initWithURL:_url];
+ NSMutableURLRequest *request = [[NSMutableURLRequest alloc] initWithURL:_url];
+ if (_options & ZANStreamPlayerOptionRequestMetadata) {
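+ // "Icy-MetaData: 1" asks SHOUTcast/Icecast servers to interleave metadata
+ // into the stream; supporting servers reply with an "icy-metaint" header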
+ [request setValue:@"1" forHTTPHeaderField:@"Icy-MetaData"];
+ }
+
_dataTask = [_urlSession dataTaskWithRequest:request];
[_dataTask resume];
@@ -268,6 +540,10 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
[_dataTask cancel];
_dataTask = nil;
+ _currentMetaPayload.length = 0;
+ _currentMetaLength = 0;
+ _bytesReadSinceMeta = 0;
+
return YES;
}
@@ -399,7 +675,8 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
}
}
- AudioQueueDispose(_audioQueue, true);
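+ // pause first and dispose asynchronously (inImmediate = false) so Core Audio
+ // can finish any in-flight buffer callbacks before tearing the queue down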
+ AudioQueuePause(_audioQueue);
+ AudioQueueDispose(_audioQueue, false);
_audioQueue = NULL;
}
}
@@ -441,190 +718,32 @@ static void _ZANAudioQueueProcessingTapCallback(void *clientData,
} while (0);
}
-- (void)_handlePropertyChangeForFileStream:(AudioFileStreamID)stream
- withPropertyID:(AudioFileStreamPropertyID)propertyID
- flags:(UInt32 *)flags
-{
- OSStatus status = noErr;
-
- switch (propertyID) {
- case kAudioFileStreamProperty_DataOffset: {
- SInt64 offset = 0;
- UInt32 propertySize = sizeof(SInt64);
- status = AudioFileStreamGetProperty(stream, kAudioFileStreamProperty_DataOffset, &propertySize, &offset);
- if (status == noErr) {
- _dataOffset = (NSUInteger)offset;
- }
- } break;
-
- case kAudioFileStreamProperty_AudioDataByteCount: {
- UInt64 byteCount = 0;
- UInt32 propertySize = sizeof(UInt64);
- status = AudioFileStreamGetProperty(stream, kAudioFileStreamProperty_AudioDataByteCount, &propertySize, &byteCount);
- if (status == noErr) {
- _audioFileLength = (NSUInteger)byteCount;
- }
- } break;
-
- case kAudioFileStreamProperty_DataFormat: {
- AudioStreamBasicDescription format = {0};
- UInt32 propertySize = sizeof(AudioStreamBasicDescription);
- status = AudioFileStreamGetProperty(stream, kAudioFileStreamProperty_DataFormat, &propertySize, &format);
- if (status == noErr) {
- _audioStreamDescription = format;
- }
- } break;
-
- case kAudioFileStreamProperty_FormatList: {
- AudioFormatListItem *formatList = NULL;
-
- do {
- // get the size of the format list
- UInt32 formatListSize = 0;
- status = AudioFileStreamGetPropertyInfo(stream, kAudioFileStreamProperty_FormatList, &formatListSize, NULL);
- if (status != noErr) {
- [self _logError:@"Failed to get format list size (OSStatus = %d)", status];
- break;
- }
-
- // get the new list of formats
- formatList = (AudioFormatListItem *)malloc(formatListSize);
- status = AudioFileStreamGetProperty(stream, kAudioFileStreamProperty_FormatList, &formatListSize, formatList);
- if (status != noErr) {
- [self _logError:@"Failed to get format list data (OSStatus = %d)", status];
- break;
- }
-
- // find the AAC format that we're interested in parsing
- unsigned formatListCount = formatListSize / sizeof(AudioFormatListItem);
- for (unsigned i = 0; i < formatListCount; ++i) {
- AudioFormatListItem formatItem = formatList[i];
- AudioStreamBasicDescription format = formatItem.mASBD;
- if (format.mFormatID == kAudioFormatMPEG4AAC_HE || format.mFormatID == kAudioFormatMPEG4AAC_HE_V2) {
- _audioStreamDescription = format;
- break;
- }
- }
- } while (0);
-
- if (formatList) {
- free(formatList);
- }
- } break;
- }
-
- if (status != noErr) {
- [self _handleError:[NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil]];
- }
-}
-
-- (void)_handleAudioPacketsAvailableWithData:(NSData *)audioData
- packetsCount:(NSUInteger)packetsCount
- packetDescriptions:(AudioStreamPacketDescription *)packetDescriptions
-{
- NSError *error = nil;
-
- do {
- // make sure we have an audio queue initialized first
- if (!_audioQueue) {
- [self _createAudioOutputQueueWithError:&error];
- }
- if (error) {
- [self _logError:@"Failed to create output queue. %@", error];
- break;
- }
-
- // parse packets
- for (unsigned i = 0; i < packetsCount; ++i) {
- SInt64 packetOffset = packetDescriptions[i].mStartOffset;
- SInt64 packetSize = packetDescriptions[i].mDataByteSize;
- size_t spaceRemaining = _packetBufferSize - _bytesFilled;
-
- if (spaceRemaining < packetSize) {
- [self _enqueueAudioBuffer];
- }
-
- // wait until the current buffer is available
- pthread_mutex_lock(&_queueBuffersMutex);
- while (_queueBuffersUsageStates[_currentBufferIndex]) {
- pthread_cond_wait(&_queueBufferReadyCondition, &_queueBuffersMutex);
- }
- pthread_mutex_unlock(&_queueBuffersMutex);
-
- // copy audio data into buffer
- AudioQueueBufferRef buffer = _audioQueueBuffers[_currentBufferIndex];
- memcpy(buffer->mAudioData + _bytesFilled, audioData.bytes + packetOffset, packetSize);
-
- // store packet description
- AudioStreamPacketDescription packetDescription = packetDescriptions[i];
- packetDescription.mStartOffset = _bytesFilled;
- _packetDescriptions[_packetsFilled] = packetDescription;
-
- _bytesFilled += packetSize;
- _packetsFilled += 1;
-
- NSUInteger packetsRemaining = MAX_PACKET_DESCRIPTIONS - _packetsFilled;
- if (packetsRemaining == 0) {
- [self _enqueueAudioBuffer];
+- (void)_processMetadataUpdate:(NSData *)metadata
+{
+ /* metadata is in the following format: "StreamTitle='Charmer & Kadenza - Garden of Dreams (HandzUpNightcore Remix)';StreamUrl='';" */
+ NSString *metadataString = [[NSString alloc] initWithData:metadata encoding:NSUTF8StringEncoding];
+ if (!metadataString) {
+ // many ICY servers send Latin-1 rather than UTF-8; fall back so metadata isn't silently dropped
+ metadataString = [[NSString alloc] initWithData:metadata encoding:NSISOLatin1StringEncoding];
+ }
+ NSArray<NSString *> *metadataComponents = [metadataString componentsSeparatedByString:@";"];
+ NSRegularExpression *regex = [NSRegularExpression regularExpressionWithPattern:@"(\\w+)='(.*)'" options:0 error:nil];
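+ // capture group 1 is the key (e.g. StreamTitle), capture group 2 the quoted value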
+ NSMutableDictionary *metadataDict = [[NSMutableDictionary alloc] init];
+
+ for (NSString *metadataComponent in metadataComponents) {
+ NSRange range = NSMakeRange(0, metadataComponent.length);
+ [regex enumerateMatchesInString:metadataComponent options:0 range:range usingBlock:^(NSTextCheckingResult *result,
+ NSMatchingFlags flags,
+ BOOL *stop)
+ {
+ if (result.numberOfRanges >= 3) {
+ NSString *key = [metadataComponent substringWithRange:[result rangeAtIndex:1]];
+ NSString *value = [metadataComponent substringWithRange:[result rangeAtIndex:2]];
+ [metadataDict setObject:value forKey:key];
}
- }
- } while (0);
-
- if (error) {
- [self _logError:@"Error encountered while handling audio packets. %@", error];
- [self _handleError:error];
- }
-}
-
-- (void)_handleAudioQueue:(AudioQueueRef)queue propertyDidChange:(AudioQueuePropertyID)property
-{
- NSAssert(queue == _audioQueue, @"Incorrect audio queue input for property change");
-
- if (property == kAudioQueueProperty_IsRunning) {
- UInt32 isRunning = 0;
- UInt32 propertySize = sizeof(UInt32);
- AudioQueueGetProperty(_audioQueue, kAudioQueueProperty_IsRunning, &isRunning, &propertySize);
-
- [self _logMessage:@"Received IsRunning property state change for queue %p. New value = %u", queue, isRunning];
-
- if (self.playing && !isRunning) {
- self.playbackState = ZANStreamPlaybackStateStopped;
- }
- }
-}
-
-- (void)_handleBufferCompleteFromQueue:(AudioQueueRef)queue buffer:(AudioQueueBufferRef)buffer
-{
- NSInteger bufferIdx = NSNotFound;
- for (unsigned i = 0; i < AUDIO_QUEUE_BUFFERS_COUNT; ++i) {
- if (buffer == _audioQueueBuffers[i]) {
- bufferIdx = i;
- break;
- }
+ }];
}
- NSAssert(bufferIdx != NSNotFound, @"An unknown audio buffer was completed");
+ _currentMetadata = metadataDict;
- pthread_mutex_lock(&_queueBuffersMutex);
- _queueBuffersUsageStates[bufferIdx] = NO;
- pthread_cond_signal(&_queueBufferReadyCondition);
- pthread_mutex_unlock(&_queueBuffersMutex);
-}
-
-- (void)_handleTapCallbackFromTap:(AudioQueueProcessingTapRef)tap
- withFramesCount:(UInt32)inNumberFrames
- audioTimestamp:(AudioTimeStamp *)ioTimeStamp
- processingTapFlags:(AudioQueueProcessingTapFlags *)flags
- outFramesCount:(UInt32 *)outNumberFrames
- data:(AudioBufferList *)ioData
-{
- OSStatus status = AudioQueueProcessingTapGetSourceAudio(tap, inNumberFrames, ioTimeStamp, flags, outNumberFrames, ioData);
- if (status != noErr) {
- [self _logError:@"Faield to get source audio in processing tap callback (OSStatus = %d)", status];
- } else {
- AudioBuffer *buffer = &ioData->mBuffers[0];
- NSData *audioData = [NSData dataWithBytesNoCopy:buffer->mData length:buffer->mDataByteSize freeWhenDone:NO];
- [_delegate streamPlayer:self didDecodeAudioData:audioData withFramesCount:inNumberFrames format:&_processingFormat];
+ if ([_delegate respondsToSelector:@selector(streamPlayer:didReceiveMetadataUpdate:)]) {
+ [_delegate streamPlayer:self didReceiveMetadataUpdate:metadataDict];
}
}
@@ -730,3 +849,21 @@ void _ZANAudioQueueProcessingTapCallback(void *clientData,
}
@end
+
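+// forwards only the session callbacks the player implements; anything else
+// falls back to NSURLSession's default handling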
+@implementation ZANURLSessionDelegateWeakForwarder
+
+- (void)URLSession:(NSURLSession *)session task:(NSURLSessionTask *)task didCompleteWithError:(NSError *)error
+{
+ if ([_forwardedDelegate respondsToSelector:@selector(URLSession:task:didCompleteWithError:)]) {
+ [_forwardedDelegate URLSession:session task:task didCompleteWithError:error];
+ }
+}
+
+- (void)URLSession:(NSURLSession *)session dataTask:(NSURLSessionDataTask *)dataTask didReceiveData:(NSData *)data
+{
+ if ([_forwardedDelegate respondsToSelector:@selector(URLSession:dataTask:didReceiveData:)]) {
+ [_forwardedDelegate URLSession:session dataTask:dataTask didReceiveData:data];
+ }
+}
+
+@end