Skip to content

Commit

Permalink
Merge pull request #726 from 18F/migrate-ga3-to-ga4
Browse files Browse the repository at this point in the history
Update Reporter to output both UA and GA4 data in reports
  • Loading branch information
levinmr authored Jan 31, 2024
2 parents 159f9bd + 94a6e7a commit 142adf4
Show file tree
Hide file tree
Showing 102 changed files with 8,484 additions and 5,693 deletions.
46 changes: 46 additions & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,45 @@ jobs:
cf push analytics-reporter-develop --strategy rolling
cf logout
ga4dev_deploy:
docker:
- image: cimg/node:16.19.1
steps:
- checkout
- restore_cache:
keys:
- v1-dependencies-{{ checksum "package.json" }}
- v1-dependencies-
- run:
name: install dependencies
command: npm install
- save_cache:
paths:
- ./node_modules
key: v1-dependencies-{{ checksum "package.json" }}

- run:
name: Install CF CLI
command: |
sudo curl -v -L -o cf8-cli-installer_8.7.4_x86-64.deb 'https://packages.cloudfoundry.org/stable?release=debian64&version=8.7.4'
sudo dpkg -i cf8-cli-installer_8.7.4_x86-64.deb
- run:
name: Write Google GA4 Credentials file from value in CircleCI env var.
command: |
echo $GA4_CREDS > ./my-analytics-ga4-65057af58daa.json
- run:
name: deploy
command: |
set -e
# Log into cloud.gov
cf api api.fr.cloud.gov
cf login -u $CF_USERNAME_DEV -p $CF_PASSWORD_DEV -o gsa-opp-analytics -s analytics-dev
cf push analytics-reporter-develop-ga4 --strategy rolling
cf logout
staging_deploy:
docker:
- image: cimg/node:16.19.1
Expand Down Expand Up @@ -135,6 +174,13 @@ workflows:
only:
- develop

ga4dev_workflow:
jobs:
- ga4dev_deploy:
filters:
branches:
only:
- migrate-ga3-to-ga4
staging_workflow:
jobs:
- staging_deploy:
Expand Down
14 changes: 14 additions & 0 deletions .mocharc.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
diff: true
extension: ['js']
package: './package.json'
slow: '75'
spec:
- 'test/**/*.js'
- 'ua/test/**/*.js'
timeout: '2000'
ui: 'bdd'
watch-files:
- 'src/**/*.js'
- 'test/**/*.js'
- 'ua/src/**/*.js'
- 'ua/test/**/*.js'
1 change: 1 addition & 0 deletions .nvmrc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
20.11
90 changes: 66 additions & 24 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ alias analytics="docker run -t -v ${HOME}:${HOME} -e ANALYTICS_REPORT_EMAIL -e A
To make this command work as expected, you should export the env vars as follows:

```bash
export ANALYTICS_REPORT_EMAIL= "your-report-email"
export ANALYTICS_REPORT_EMAIL="your-report-email"
export ANALYTICS_REPORT_IDS="your-report-ids"
export ANALYTICS_KEY="your-key"
```
Expand Down Expand Up @@ -180,47 +180,68 @@ A report might look something like this:
"name": "devices",
"query": {
"dimensions": [
"ga:date",
"ga:deviceCategory"
{
"name": "date"
},
{
"name": "deviceCategory"
}
],
"metrics": [
"ga:sessions"
{
"name": "sessions"
}
],
"start-date": "90daysAgo",
"end-date": "yesterday",
"sort": "ga:date"
"dateRanges": [
{
"startDate": "90daysAgo",
"endDate": "yesterday"
}
],
"orderBys": [
{
"dimension": {
"dimensionName": "date"
},
"desc": true
}
],
"samplingLevel": "HIGHER_PRECISION",
"limit": "10000",
"property": "properties/393249053"
},
"meta": {
"name": "Devices",
"description": "Weekly desktop/mobile/tablet visits by day for all sites."
"description": "90 days of desktop/mobile/tablet visits for all sites."
},
"data": [
{
"date": "2014-10-14",
"device": "desktop",
"visits": "11495462"
"date": "2023-12-25",
"device": "mobile",
"visits": "13681896"
},
{
"date": "2014-10-14",
"device": "mobile",
"visits": "2499586"
"date": "2023-12-25",
"device": "desktop",
"visits": "5775002"
},
{
"date": "2014-10-14",
"date": "2023-12-25",
"device": "tablet",
"visits": "976396"
"visits": "367039"
},
// ...
...
],
"totals": {
"visits": 3584551745,
"devices": {
"mobile": 213920363,
"desktop": 755511646,
"tablet": 81874189
},
"start_date": "2014-10-14",
"end_date": "2015-01-11"
}
"mobile": 2012722956,
"desktop": 1513968883,
"tablet": 52313579,
"smart tv": 5546327
}
},
"taken_at": "2023-12-26T20:52:50.062Z"
}
```

Expand Down Expand Up @@ -344,6 +365,27 @@ Compose:
docker-compose up
```

#### Running unit tests locally

The unit tests require a postgres database to be running and accepting
connections at 127.0.0.1:5432

To run the test database locally with docker:

```shell
docker-compose -f docker-compose.test.yml up
```

The test scripts run database migrations and then the tests themselves. These
require database connection details to be provided in the shell environment:

```shell
POSTGRES_PASSWORD=123abc \
POSTGRES_USER=analytics \
POSTGRES_DATABASE=analytics_reporter_test \
npm test
```

### Public domain

This project is in the worldwide [public domain](LICENSE.md). As stated in [CONTRIBUTING](CONTRIBUTING.md):
Expand Down
144 changes: 81 additions & 63 deletions deploy/cron.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,22 @@ logger.info(" Running /deploy/cron.js");
logger.info("===================================");

const scriptRootPath = `${process.env.ANALYTICS_ROOT_PATH}/deploy`
const scriptUARootPath = `${process.env.ANALYTICS_UA_ROOT_PATH}/deploy`

/**
 * Spawns the UA (Universal Analytics) deploy script `api.sh` and pipes its
 * stdout/stderr into the shared logger.
 * NOTE(review): assumes `spawn`, `logger`, and `scriptUARootPath` are defined
 * earlier in this file (they are, above this block).
 * Fixes: replaces the stray `console.log` with `logger.info` for consistency
 * with the sibling *_run functions, and corrects the "exitted" typo in the
 * exit-code log message.
 */
const api_ua_run = function() {
  logger.info("about to run ua api.sh");
  logger.info(`${scriptUARootPath}/api.sh`);
  const api = spawn(`${scriptUARootPath}/api.sh`);
  api.stdout.on("data", (data) => {
    logger.info("[ua - api.sh]", data.toString().trim());
  });
  api.stderr.on("data", (data) => {
    logger.info("[ua - api.sh]", data.toString().trim());
  });
  api.on("exit", (code) => {
    logger.info("ua - api.sh exited with code:", code);
  });
};

var api_run = function() {
logger.info("about to run api.sh");
Expand All @@ -37,50 +53,50 @@ var api_run = function() {
})
}

// var daily_run = function() {
// logger.info("about to run daily.sh");

// var daily = spawn(`${scriptRootPath}/daily.sh`)
// daily.stdout.on("data", (data) => {
// logger.info("[daily.sh]", data.toString().trim())
// })
// daily.stderr.on("data", (data) => {
// logger.info("[daily.sh]", data.toString().trim())
// })
// daily.on("exit", (code) => {
// logger.info("daily.sh exitted with code:", code)
// })
// }

// var hourly_run = function(){
// logger.info("about to run hourly.sh");

// var hourly = spawn(`${scriptRootPath}/hourly.sh`)
// hourly.stdout.on("data", (data) => {
// logger.info("[hourly.sh]", data.toString().trim())
// })
// hourly.stderr.on("data", (data) => {
// logger.info("[hourly.sh]", data.toString().trim())
// })
// hourly.on("exit", (code) => {
// logger.info("hourly.sh exitted with code:", code)
// })
// }

// var realtime_run = function(){
// logger.info("about to run realtime.sh");

// var realtime = spawn(`${scriptRootPath}/realtime.sh`)
// realtime.stdout.on("data", (data) => {
// logger.info("[realtime.sh]", data.toString().trim())
// })
// realtime.stderr.on("data", (data) => {
// logger.info("[realtime.sh]", data.toString().trim())
// })
// realtime.on("exit", (code) => {
// logger.info("realtime.sh exitted with code:", code)
// })
// }
/**
 * Spawns the daily report script `daily.sh` and forwards its stdout/stderr
 * to the shared logger.
 * NOTE(review): assumes `spawn`, `logger`, and `scriptRootPath` are defined
 * earlier in this file.
 * Fixes: corrects the "exitted" typo in the exit-code log message; uses
 * `const` and explicit semicolons.
 */
const daily_run = function() {
  logger.info("about to run daily.sh");

  const daily = spawn(`${scriptRootPath}/daily.sh`);
  daily.stdout.on("data", (data) => {
    logger.info("[daily.sh]", data.toString().trim());
  });
  daily.stderr.on("data", (data) => {
    logger.info("[daily.sh]", data.toString().trim());
  });
  daily.on("exit", (code) => {
    logger.info("daily.sh exited with code:", code);
  });
};

/**
 * Spawns the hourly report script `hourly.sh` and forwards its stdout/stderr
 * to the shared logger.
 * NOTE(review): assumes `spawn`, `logger`, and `scriptRootPath` are defined
 * earlier in this file.
 * Fixes: corrects the "exitted" typo in the exit-code log message; uses
 * `const` and explicit semicolons.
 */
const hourly_run = function() {
  logger.info("about to run hourly.sh");

  const hourly = spawn(`${scriptRootPath}/hourly.sh`);
  hourly.stdout.on("data", (data) => {
    logger.info("[hourly.sh]", data.toString().trim());
  });
  hourly.stderr.on("data", (data) => {
    logger.info("[hourly.sh]", data.toString().trim());
  });
  hourly.on("exit", (code) => {
    logger.info("hourly.sh exited with code:", code);
  });
};

/**
 * Spawns the realtime report script `realtime.sh` and forwards its
 * stdout/stderr to the shared logger.
 * NOTE(review): assumes `spawn`, `logger`, and `scriptRootPath` are defined
 * earlier in this file.
 * Fixes: corrects the "exitted" typo in the exit-code log message; uses
 * `const` and explicit semicolons.
 */
const realtime_run = function() {
  logger.info("about to run realtime.sh");

  const realtime = spawn(`${scriptRootPath}/realtime.sh`);
  realtime.stdout.on("data", (data) => {
    logger.info("[realtime.sh]", data.toString().trim());
  });
  realtime.stderr.on("data", (data) => {
    logger.info("[realtime.sh]", data.toString().trim());
  });
  realtime.on("exit", (code) => {
    logger.info("realtime.sh exited with code:", code);
  });
};

/**
Daily reports run every morning at 10 AM UTC.
Expand All @@ -98,22 +114,24 @@ var calculateNextDailyRunTimeOffset = function(){
}

logger.info("starting cron.js!");
// api_run();
// daily_run();
// hourly_run();
// realtime_run();
// //api
// setInterval(api_run,1000 * 60 * 60 * 24)
// //daily
// setTimeout(() => {
// // Run at 10 AM UTC, then every 24 hours afterwards
// // daily_run();
// // setInterval(daily_run, 1000 * 60 * 60 * 24);
// //api
// api_run();
// setInterval(api_run,1000 * 60 * 60 * 24)
// }, calculateNextDailyRunTimeOffset());
// //hourly
// setInterval(hourly_run,1000 * 60 * 60);
// //realtime
// setInterval(realtime_run,1000 * 60 * 5);
// Kick off every report type once immediately at process start.
// NOTE(review): api_run and daily_run also fire again inside the setTimeout
// below when the 10 AM UTC offset elapses — confirm the startup run plus the
// scheduled run is intentional and not a double-run.
api_run();
api_ua_run();
daily_run();
hourly_run();
realtime_run();
// daily
// Wait until the next 10 AM UTC (offset computed by
// calculateNextDailyRunTimeOffset), then run the daily jobs and repeat them
// every 24 hours.
setTimeout(() => {
// Run at 10 AM UTC, then every 24 hours afterwards
daily_run();
setInterval(daily_run, 1000 * 60 * 60 * 24);
//api
api_run();
setInterval(api_run,1000 * 60 * 60 * 24)
//ua api
api_ua_run();
setInterval(api_ua_run,1000 * 60 * 60 * 24)
}, calculateNextDailyRunTimeOffset());
//hourly
// hourly.sh every 60 minutes
setInterval(hourly_run,1000 * 60 * 60);
//realtime
// realtime.sh every 5 minutes
setInterval(realtime_run,1000 * 60 * 5);
3 changes: 2 additions & 1 deletion deploy/envs/agency-international-development.env
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# Agency for International Development
export ANALYTICS_REPORT_IDS="ga:68380943"
# USAID Agency
export ANALYTICS_REPORT_IDS="395450427"
export AGENCY_NAME=agency-international-development
export AWS_BUCKET_PATH=data/agency-international-development
3 changes: 2 additions & 1 deletion deploy/envs/agriculture.env
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# Department of Agriculture
export ANALYTICS_REPORT_IDS="ga:65240995"
# USDA Agency
export ANALYTICS_REPORT_IDS="395461442"
export AGENCY_NAME=agriculture
export AWS_BUCKET_PATH=data/agriculture

3 changes: 2 additions & 1 deletion deploy/envs/commerce.env
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# Department of Commerce
export ANALYTICS_REPORT_IDS="ga:66877186"
# DOC Agency
export ANALYTICS_REPORT_IDS="395253935"
export AGENCY_NAME=commerce
export AWS_BUCKET_PATH=data/commerce
3 changes: 2 additions & 1 deletion deploy/envs/defense.env
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# Department of Defense
export ANALYTICS_REPORT_IDS="ga:67120289"
# DOD Agency
export ANALYTICS_REPORT_IDS="395251747"
export AGENCY_NAME=defense
export AWS_BUCKET_PATH=data/defense
Loading

0 comments on commit 142adf4

Please sign in to comment.