Adding sample checks
sirknightj committed Feb 7, 2025
1 parent 9e057c2 commit b885357
Showing 2 changed files with 153 additions and 26 deletions.
168 changes: 143 additions & 25 deletions .github/workflows/samples.yml
@@ -21,15 +21,13 @@ jobs:
args: -f sample.mp4
- name: kvs_gstreamer_file_uploader_sample
args: sample.mp4 0 audio-video
# - name: kvs_gstreamer_multistream_sample
# args: ""
- name: kvs_gstreamer_sample
args: sample.mp4
- name: kvssink_gstreamer_sample
args: sample.mp4
runner:
- id: macos-latest
image: macos-latest
- id: macos-13
image: macos-13

- id: ubuntu-22.04
image: ubuntu-latest
@@ -50,8 +48,9 @@ jobs:

env:
AWS_KVS_LOG_LEVEL: 2
KVS_DEBUG_DUMP_DATA_FILE_DIR: ./debug_output
KVS_DEBUG_DUMP_DATA_FILE_DIR: ${{ github.workspace }}/build/debug_output
DEBIAN_FRONTEND: noninteractive
GST_PLUGIN_PATH: ${{ github.workspace }}/build

permissions:
id-token: write
@@ -92,17 +91,14 @@ jobs:
- name: Build samples (Windows)
if: runner.os == 'Windows'
shell: cmd
run: |
@echo on
set PATH=%PATH%;C:\Strawberry\perl\site\bin;C:\Strawberry\perl\bin;C:\Strawberry\c\bin;C:\Program Files\NASM;D:\a\amazon-kinesis-video-streams-producer-c\amazon-kinesis-video-streams-producer-c\open-source\lib;D:\a\amazon-kinesis-video-streams-producer-c\amazon-kinesis-video-streams-producer-c\open-source\bin
$env:Path += ';C:\Strawberry\perl\site\bin;C:\Strawberry\perl\bin;C:\Strawberry\c\bin;C:\Program Files\NASM;D:\producer\open-source\local\lib;D:\producer\open-source\local\bin'
mkdir D:\producer
Move-Item -Path "D:\a\amazon-kinesis-video-streams-producer-sdk-cpp\amazon-kinesis-video-streams-producer-sdk-cpp\*" -Destination "D:\producer"
cd D:\producer
git config --system core.longpaths true
"C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
mkdir build
cd build
cmake -G "NMake Makefiles" -DBUILD_GSTREAMER_PLUGIN=ON -DPKG_CONFIG_EXECUTABLE="D:\\gstreamer\\1.0\\msvc_x86_64\\bin\\pkg-config.exe" ..
nmake
mkdir "%KVS_DEBUG_DUMP_DATA_FILE_DIR%"
dir
.github\build_windows.bat
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
@@ -114,9 +110,6 @@ jobs:

- name: Run ${{ matrix.sample.name }} (Linux & Mac)
if: runner.os == 'Linux' || runner.os == 'macOS'
env:
GST_PLUGIN_PATH: ${{ github.workspace }}/build
KVS_DEBUG_DUMP_DATA_FILE_DIR: ${{ github.workspace }}/build/debug_output
working-directory: ./build
run: |
curl -fsSL -o sample.mp4 https://awsj-iot-handson.s3-ap-northeast-1.amazonaws.com/kvs-workshop/sample.mp4
@@ -125,15 +118,21 @@ jobs:
- name: Run ${{ matrix.sample.name }} (Windows)
if: runner.os == 'Windows'
env:
GST_PLUGIN_PATH: ${{ github.workspace }}/build
KVS_DEBUG_DUMP_DATA_FILE_DIR: ${{ github.workspace }}/build/debug_output
working-directory: ./build
GST_PLUGIN_PATH: D:\producer\build
KVS_DEBUG_DUMP_DATA_FILE_DIR: D:\producer\debug_output
working-directory: D:\producer\build
run: |
# Equivalent to set -x
Set-PSDebug -Trace 1
$env:Path += ';C:\Strawberry\perl\site\bin;C:\Strawberry\perl\bin;C:\Strawberry\c\bin;C:\Program Files\NASM;D:\producer\open-source\local\lib;D:\producer\open-source\local\bin;D:\gstreamer\1.0\msvc_x86_64\bin'
mkdir D:\producer\debug_output
Invoke-WebRequest -Uri https://awsj-iot-handson.s3-ap-northeast-1.amazonaws.com/kvs-workshop/sample.mp4 -OutFile sample.mp4
$exePath = Join-Path $PWD ${{ matrix.sample.name }}
& $exePath.exe demo-stream-producer-cpp-${{ matrix.runner.id }}-ci-${{ matrix.sample.name }} ${{ matrix.sample.args }}
dir
$exePath = Join-Path $PWD ${{ matrix.sample.name }}.exe
& $exePath demo-stream-producer-cpp-${{ matrix.runner.id }}-ci-${{ matrix.sample.name }} ${{ matrix.sample.args }}
- name: Verify MKV dump (Mac & Linux)
if: runner.os == 'Linux' || runner.os == 'macOS'
@@ -154,13 +153,13 @@ jobs:
done
shell: bash

- name: Verify MKV dump
- name: Verify MKV dump (Windows)
if: runner.os == 'Windows'
working-directory: D:\producer\build
run: |
$env:Path += ";C:\Program Files\MKVToolNix"
dir debug_output
$mkvFiles = Get-ChildItem -Path "D:\producer\build\debug_output" -Filter *.mkv
dir D:\producer\debug_output
$mkvFiles = Get-ChildItem -Path "D:\producer\debug_output" -Filter *.mkv
if ($mkvFiles.Count -eq 0) {
Write-Error "No MKV files found in D:\producer\debug_output"
exit 1
@@ -170,3 +169,122 @@ jobs:
Write-Output "Verifying $($file.FullName) with mkvinfo (verbose and hexdump):"
mkvinfo.exe -v -X "$($file.FullName)"
}
multistream-sample:
name: Multistream sample on Mac
runs-on: macos-13
timeout-minutes: 30

env:
AWS_KVS_LOG_LEVEL: 2
KVS_DEBUG_DUMP_DATA_FILE_DIR: ${{ github.workspace }}/build/debug_output
GST_PLUGIN_PATH: ${{ github.workspace }}/build
DEBIAN_FRONTEND: noninteractive

permissions:
id-token: write
contents: read

steps:
- name: Clone repository
uses: actions/checkout@v4

- name: Install dependencies
run: |
brew install gstreamer log4cplus mkvtoolnix coreutils
brew install --cask docker
- name: Build samples
run: |
mkdir build && cd build
mkdir -p $KVS_DEBUG_DUMP_DATA_FILE_DIR
cmake .. -DBUILD_GSTREAMER_PLUGIN=ON -DBUILD_DEPENDENCIES=OFF
make -j$(nproc)
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }}
role-session-name: ${{ secrets.AWS_ROLE_SESSION_NAME }}
aws-region: ${{ secrets.AWS_REGION }}
role-duration-seconds: 10800

- name: Run multistream sample
working-directory: ./build
run: |
set -x
open -a /Applications/Docker.app --args --unattended --accept-license
echo "We are waiting for Docker to be up and running. It can take over 2 minutes..."
while ! /Applications/Docker.app/Contents/Resources/bin/docker info &>/dev/null; do sleep 1; done
sudo ln -s ~/.docker/run/docker.sock /var/run/docker.sock
docker run -d --rm -it -e RTSP_PROTOCOLS=tcp -p 8554:8554 bluenviron/mediamtx:latest
docker run -d --rm -it -e RTSP_PROTOCOLS=tcp -p 8555:8554 bluenviron/mediamtx:latest
(
ffmpeg -re -f lavfi -i "testsrc=size=640x480:rate=10" -vcodec libx264 -x264-params keyint=25 -f rtsp rtsp://localhost:8554/mystream
) &
(
ffmpeg -re -f lavfi -i "testsrc=size=640x480:rate=10" -vcodec libx264 -x264-params keyint=25 -f rtsp rtsp://localhost:8555/mystream
) &
echo "rtsp://0.0.0.0:8554/mystream" > rtsp-urls.txt
echo "rtsp://0.0.0.0:8555/mystream" >> rtsp-urls.txt
sleep 10
gst-discoverer-1.0 rtsp://0.0.0.0:8554/mystream
gst-discoverer-1.0 rtsp://0.0.0.0:8555/mystream
set +e # Disable exit on error for the timeout command
gtimeout --preserve-status --signal=SIGINT --kill-after=15s 30s \
./kvs_gstreamer_multistream_sample demo-stream-producer-cpp-macos-13-ci-kvs_gstreamer_multistream_sample rtsp-urls.txt
EXIT_CODE=$?
set -e # Re-enable exit on error
# 130 (128 + 2): Process killed by SIGINT
# 137: Process killed by SIGKILL (if the --kill-after timeout is reached)
echo "Command exited with code: $EXIT_CODE"
if [ $EXIT_CODE -ne 130 ]; then
echo "Command did not exit gracefully after interrupt."
exit 1
fi
shell: bash

- name: Verify MKV dump
working-directory: ./build/debug_output
run: |
shopt -s nullglob # Expand *.mkv to an empty list instead of the literal pattern when no files match
ls -tlrh
mkvfiles=(*.mkv)
if [ ${#mkvfiles[@]} -eq 0 ]; then
echo "No MKV files found in debug_output"
exit 1
fi
found_0=0
found_1=0
for file in "${mkvfiles[@]}"; do
if [[ "$file" == demo-stream-producer-cpp-macos-13-ci-kvs_gstreamer_multistream_sample_0* ]]; then
found_0=1
elif [[ "$file" == demo-stream-producer-cpp-macos-13-ci-kvs_gstreamer_multistream_sample_1* ]]; then
found_1=1
fi
done
if [ $found_0 -eq 0 ] || [ $found_1 -eq 0 ]; then
echo "Expected at least one file starting with each prefix:"
echo " - demo-stream-producer-cpp-macos-13-ci-kvs_gstreamer_multistream_sample_0"
echo " - demo-stream-producer-cpp-macos-13-ci-kvs_gstreamer_multistream_sample_1"
exit 1
fi
for file in "${mkvfiles[@]}"; do
echo "Verifying $file with mkvinfo (verbose and hexdump):"
mkvinfo -v -X "$file"
done
shell: bash
11 changes: 10 additions & 1 deletion samples/kvs_gstreamer_multistream_sample.cpp
@@ -53,6 +53,7 @@ LOGGER_TAG("com.amazonaws.kinesis.video.gstreamer");
#define DEFAULT_BUFFER_SIZE (1 * 1024 * 1024)
#define DEFAULT_STORAGE_SIZE (128 * 1024 * 1024)
#define DEFAULT_ROTATION_TIME_SECONDS 3600
#define DEFAULT_FRAME_DURATION_MS 2

namespace com { namespace amazonaws { namespace kinesis { namespace video {

@@ -180,6 +181,7 @@ typedef struct _CustomData {
// Pts of first frame
map<string, uint64_t> first_pts_map;
map<string, uint64_t> producer_start_time_map;
map<string, uint64_t> last_dts_map;
} CustomData;

void create_kinesis_video_frame(Frame *frame, const nanoseconds &pts, const nanoseconds &dts, FRAME_FLAGS flags,
@@ -259,6 +261,12 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) {

buffer->pts += data->producer_start_time_map[stream_handle_key] - data->first_pts_map[stream_handle_key];

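// If this buffer arrived without a valid DTS, fall back to the last DTS seen
// for this stream plus one default frame duration, so put_frame() always
// receives a usable, increasing decode timestamp.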
if (!GST_BUFFER_DTS_IS_VALID(buffer)) {
buffer->dts = data->last_dts_map[stream_handle_key] + DEFAULT_FRAME_DURATION_MS * HUNDREDS_OF_NANOS_IN_A_MILLISECOND * DEFAULT_TIME_UNIT_IN_NANOS;
}

data->last_dts_map[stream_handle_key] = buffer->dts;

if (false == put_frame(data->kinesis_video_stream_handles[stream_handle_key], data->frame_data_map[stream_handle_key], buffer_size, std::chrono::nanoseconds(buffer->pts),
std::chrono::nanoseconds(buffer->dts), kinesis_video_flags)) {
GST_WARNING("Dropped frame");
@@ -405,7 +413,8 @@ int gstreamer_init(int argc, char *argv[]) {
data.frame_data_map = map<string, uint8_t*>();
data.frame_data_size_map = map<string, UINT32>();
data.first_pts_map = map<string, uint64_t>();
data.producer_start_time_map = map<string, uint64_t>();;
data.producer_start_time_map = map<string, uint64_t>();
data.last_dts_map = map<string, uint64_t>();

/* init GStreamer */
gst_init(&argc, &argv);
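For readers following the C++ change above: the new last_dts_map plus the GST_BUFFER_DTS_IS_VALID check fills in a missing decode timestamp by advancing the previous DTS for that stream by one default frame duration (2 ms, i.e. DEFAULT_FRAME_DURATION_MS * HUNDREDS_OF_NANOS_IN_A_MILLISECOND * DEFAULT_TIME_UNIT_IN_NANOS nanoseconds). The sketch below is a minimal, self-contained illustration of that idea only; the constant values, map, and function names are illustrative stand-ins rather than the sample's actual code, and the GstBuffer plumbing is omitted.

#include <cassert>
#include <cstdint>
#include <map>
#include <string>

// Stand-in for DEFAULT_FRAME_DURATION_MS (2 ms) expressed in nanoseconds,
// matching the unit of GStreamer buffer timestamps.
static const uint64_t kDefaultFrameDurationNs = 2ULL * 1000 * 1000;
// Stand-in for GST_CLOCK_TIME_NONE, i.e. "this buffer carries no DTS".
static const uint64_t kInvalidDts = UINT64_MAX;

// Last DTS forwarded for each stream, keyed per stream like the sample's maps.
static std::map<std::string, uint64_t> last_dts_map;

// Return a usable DTS: the buffer's own DTS when valid, otherwise the previous
// DTS for this stream advanced by one default frame duration; remember it either way.
uint64_t resolve_dts(const std::string &stream_key, uint64_t buffer_dts) {
    if (buffer_dts == kInvalidDts) {
        buffer_dts = last_dts_map[stream_key] + kDefaultFrameDurationNs;
    }
    last_dts_map[stream_key] = buffer_dts;
    return buffer_dts;
}

int main() {
    assert(resolve_dts("stream0", 5000000) == 5000000);     // valid DTS passes through unchanged
    assert(resolve_dts("stream0", kInvalidDts) == 7000000); // missing DTS -> last DTS + 2 ms
    return 0;
}

In the sample itself the check uses GST_BUFFER_DTS_IS_VALID(buffer), the key is the per-stream handle key, and the resolved DTS is passed to put_frame() together with the adjusted PTS.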
