diff --git a/.bin/direnv b/.bin/direnv new file mode 100755 index 0000000..8f2ffea --- /dev/null +++ b/.bin/direnv @@ -0,0 +1,230 @@ +#!/usr/bin/env bash +# shellcheck disable=SC2155 +# LISENCE: https://github.com/versenv/versenv/blob/HEAD/LICENSE +set -Eeu -o pipefail + +# versenv unique +exe_filename=direnv +env_key_version=DIRENV_VERSION +git_url_prefix=https://github.com/direnv/direnv + +GetProgramLatestStableVersion() { HeadURL "${git_url_prefix:?}/releases/latest" | awk -F"/tag/" "/^[Ll]ocation:/ {print \$2}" | tr -d "[:cntrl:]" | tr -d "^v" | tail -n 1; } +SubcommandGetProgramVersions() { git ls-remote --quiet --refs --tags "${git_url_prefix:?}.git" | grep -Eo "v?[0-9]+\.[0-9]+\.[0-9]+(-[^\"]+)?" | tr -d "^v" | sort -uV; } +SubcommandGetProgramStableVersions() { SubcommandGetProgramVersions | grep -E "[0-9]+\.[0-9]+\.[0-9]+$"; } + +InstallProgram() { + Prepare + # vars + local prog_version="${1:?}" + local prog_os && prog_os=$(uname -s | tr '[:upper:]' '[:lower:]') + local machine_arch && machine_arch=$(uname -m) + local prog_arch + if [[ ${machine_arch:?} = x86_64 ]]; then + prog_arch=amd64 + elif [[ ${machine_arch:?} = arm64 ]]; then + prog_arch=arm64 + else + RecCritical "arch (${machine_arch:?}) is not supported" + exit 1 + fi + local download_url && download_url="${git_url_prefix:?}/releases/download/v${prog_version:?}/${exe_filename:?}.${prog_os:?}-${prog_arch:?}" + local downloaded_path && downloaded_path=${prog_version_tmp_dir:?}/$(basename "${download_url:?}") + # download + RecNotice "Download ${download_url:?}" + DownloadURL "${download_url:?}" "${downloaded_path:?}" + # install + RecNotice "Install ${downloaded_path:?} to ${prog_version_exe:?}" + RecExec mv -f "${downloaded_path:?}" "${prog_version_exe:?}" + RecExec chmod +x "${prog_version_exe:?}" +} + +ExecProgram() { + Prepare + # install + if [[ ! 
-x "${prog_version_exe:?}" ]]; then + InstallProgram "${prog_version:?}" + fi + # exec + exec "${prog_version_exe:?}" "$@" <&0 +} + +# LISENCE: https://github.com/kunitsucom/rec.sh/blob/HEAD/LICENSE +# Common +if [ -t 2 ]; then REC_COLOR=true; else REC_COLOR=''; fi +_recRFC3339() { date "+%Y-%m-%dT%H:%M:%S%z" | sed "s/\(..\)$/:\1/"; } +_recCmd() { for a in "$@"; do if echo "${a:-}" | grep -Eq "[[:blank:]]"; then printf "'%s' " "${a:-}"; else printf "%s " "${a:-}"; fi; done | sed "s/ $//"; } +# Color +RecDefault() { test " ${REC_SEVERITY:-0}" -gt 000 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;35m} DEFAULT${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecDebug() { test " ${REC_SEVERITY:-0}" -gt 100 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;34m} DEBUG${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecInfo() { test " ${REC_SEVERITY:-0}" -gt 200 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;32m} INFO${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecNotice() { test " ${REC_SEVERITY:-0}" -gt 300 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;36m} NOTICE${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecWarning() { test " ${REC_SEVERITY:-0}" -gt 400 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;33m} WARNING${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecError() { test " ${REC_SEVERITY:-0}" -gt 500 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;31m} ERROR${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecCritical() { test " ${REC_SEVERITY:-0}" -gt 600 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;1;31m} CRITICAL${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecAlert() { test " ${REC_SEVERITY:-0}" -gt 700 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;41m} ALERT${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecEmergency() { test "${REC_SEVERITY:-0}" -gt 800 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;1;41m}EMERGENCY${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecExec() { RecInfo "$ $(_recCmd "$@")" && "$@"; } +RecRun() { _dlm="####R#E#C#D#E#L#I#M#I#T#E#R####" && _all=$({ _out=$("$@") && _rtn=$? || _rtn=$? && printf "\n%s" "${_dlm:?}${_out:-}" && return "${_rtn:-0}"; } 2>&1) && _rtn=$? || _rtn=$? 
&& _dlmno=$(echo "${_all:-}" | sed -n "/${_dlm:?}/=") && _cmd=$(_recCmd "$@") && _stdout=$(echo "${_all:-}" | tail -n +"${_dlmno:-1}" | sed "s/^${_dlm:?}//") && _stderr=$(echo "${_all:-}" | head -n "${_dlmno:-1}" | grep -v "^${_dlm:?}") && RecInfo "$ ${_cmd:-}" && { [ -z "${_stdout:-}" ] || RecInfo "${_stdout:?}"; } && { [ -z "${_stderr:-}" ] || RecWarning "${_stderr:?}"; } && return "${_rtn:-0}"; } + +# versenv common +DownloadURL() { + local url="${1:?}" + local file="${2:?}" + if command -v curl >/dev/null; then + RecExec curl --tlsv1.2 --connect-timeout 2 --progress-bar -fLR "${url:?}" -o "${file:?}" + elif command -v wget >/dev/null; then + RecExec wget --secure-protocol=TLSv1_2 --dns-timeout=2 --connect-timeout=2 -q "${url:?}" -O "${file:?}" + else + RecCritical "command not found: curl or wget" + exit 127 + fi +} + +GetURLs() { + for arg in "$@"; do + local url="${arg:?}" + if command -v wget >/dev/null; then + wget --secure-protocol=TLSv1_2 --dns-timeout=2 --connect-timeout=2 -q -O- "${url:?}" + elif command -v curl >/dev/null; then + curl --tlsv1.2 --connect-timeout 2 -fLRSs "${url:?}" + else + RecCritical "command not found: curl or wget" + exit 127 + fi + done +} + +HeadURL() { + local url="${1:?}" + if command -v wget >/dev/null; then + LC_ALL=C wget --secure-protocol=TLSv1_2 --dns-timeout=2 --connect-timeout=2 -S --spider --max-redirect=0 "${url:?}" -O /dev/null 2>&1 | awk -F" " "/ / {print \$2}" + elif command -v curl >/dev/null; then + curl --tlsv1.2 --connect-timeout 2 -fIRSs "${url:?}" + else + RecCritical "command not found: curl or wget" + exit 127 + fi +} + +SubcommandSelfUpdate() { + local self_update_url="https://raw.githubusercontent.com/versenv/versenv/HEAD/bin/${exe_filename:?}" + local script_file_path="${0:?}" + local tmp_dir=/tmp/versenv/bin + local tmp_file=${tmp_dir:?}/${exe_filename:?} + local backup_file="${tmp_file:?}.backup" + RecNotice "Download ${self_update_url:?}" + mkdir -p "${tmp_dir:?}" + DownloadURL "${self_update_url:?}" "${tmp_file:?}" + RecNotice "Take backup ${script_file_path:?} to ${backup_file:?}" + RecExec mv -f "${script_file_path:?}" "${backup_file:?}" + RecNotice "Show the changes between old and new" + RecExec diff -u "${backup_file:?}" "${tmp_file:?}" || true + RecNotice "Update ${script_file_path:?} to ${self_update_url:?}" + RecExec chmod +x "${tmp_file:?}" + RecExec mv -f "${tmp_file:?}" "${script_file_path:?}" +} + +MustFoundCommands() { + # shellcheck disable=SC2207 + local not_found_commands=($( + for cmd in "$@"; do + if ! command -v "${cmd-}" 1>/dev/null; then + echo "${cmd-}" + fi + done + )) + if [[ "${#not_found_commands[@]}" -eq 0 ]]; then + return + fi + RecCritical "command not found: ${not_found_commands[*]}" + exit 127 +} + +ResolveProgramVersion() { + local version && version=$( + if [[ ${!env_key_version:-} = latest ]] || [[ ${!env_key_version:-} = stable ]]; then + GetProgramLatestStableVersion + elif [[ ${!env_key_version:-} ]]; then + echo "${!env_key_version:?}" + else + ver=$(GetProgramLatestStableVersion) + RecNotice "env ${env_key_version:?} is not set. 
Use latest stable version: ${env_key_version:?}=${ver:-"?"}" + echo "${ver:-}" + fi + ) + if [[ "${version:-}" ]]; then + echo "${version:?}" + return 0 + else + RecCritical "Failed to resolve version" + return 1 + fi +} + +Prepare() { + if [[ ${vers_prepared:-} = "${exe_filename:?}" ]]; then + return 0 + else + vers_prepared="${exe_filename:?}" + fi + # common vars + cache_dir=~/.cache/versenv + # vars + prog_version="$(ResolveProgramVersion)" + prog_versions_dir="${cache_dir:?}/${exe_filename:?}" + prog_version_dir="${prog_versions_dir:?}/${prog_version:?}" + prog_version_tmp_dir="${prog_version_dir:?}/tmp" + prog_version_bin_dir="${prog_version_dir:?}/bin" + prog_version_exe="${prog_version_bin_dir:?}/${exe_filename:?}" + # Create directories in idempotent + mkdir -p "${prog_version_tmp_dir:?}" "${prog_version_bin_dir:?}" +} + +Usage() { + cat <&2; } +LogshDebug() { test " ${LOGSH_LEVEL:-0}" -gt 100 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;34m} DEBUG${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshInfo() { test " ${LOGSH_LEVEL:-0}" -gt 200 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;32m} INFO${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshNotice() { test " ${LOGSH_LEVEL:-0}" -gt 300 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;36m} NOTICE${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshWarn() { test " ${LOGSH_LEVEL:-0}" -gt 400 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;33m} WARN${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshWarning() { test " ${LOGSH_LEVEL:-0}" -gt 400 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;33m} WARNING${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshError() { test " ${LOGSH_LEVEL:-0}" -gt 500 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;31m} ERROR${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshCritical() { test " ${LOGSH_LEVEL:-0}" -gt 600 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;1;31m} CRITICAL${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshAlert() { test " ${LOGSH_LEVEL:-0}" -gt 700 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;41m} ALERT${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshEmergency() { test "${LOGSH_LEVEL:-0}" -gt 800 || echo "$*" | awk "{print \"$(_logshRFC3339) [${LOGSH_COLOR:+\\033[0;1;41m}EMERGENCY${LOGSH_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +LogshExec() { LogshInfo "$ $(_logshCmd "$@")" && "$@"; } +LogshRun() { _dlm="####R#E#C#D#E#L#I#M#I#T#E#R####" && _all=$({ _out=$("$@") && _rtn=$? || _rtn=$? && printf "\n%s" "${_dlm:?}${_out:-}" && return "${_rtn:-0}"; } 2>&1) && _rtn=$? || _rtn=$? && _dlmno=$(echo "${_all:-}" | sed -n "/${_dlm:?}/=") && _cmd=$(_logshCmd "$@") && _stdout=$(echo "${_all:-}" | tail -n +"${_dlmno:-1}" | sed "s/^${_dlm:?}//") && _stderr=$(echo "${_all:-}" | head -n "${_dlmno:-1}" | grep -v "^${_dlm:?}") && LogshInfo "$ ${_cmd:-}" && LogshInfo "${_stdout:-}" && { [ -z "${_stderr:-}" ] || LogshWarning "${_stderr:?}"; } && return "${_rtn:-0}"; } + +__main__() { + cd "${REPO_ROOT:?}" || return $? 
# cd repo root + + targets=$( + find "${REPO_ROOT:?}" -name go.mod -print | # find go.mod + sed "s@${REPO_ROOT:?}/@@g; s@/*go\.mod@@g; s@^@./@;" | # trim repo root path + cat || true + ) + LogshInfo "targets:" "$(tr '\n' ' ' <<< "${targets:-none}")" + if [ -n "${targets:-}" ]; then + while read -r mod; do + LogshExec bash -c "cd ${REPO_ROOT:?}/${mod:?} && go mod tidy" + done <<<"${targets:?}" + fi +} + +__main__ "$@" diff --git a/.bin/golangci-lint b/.bin/golangci-lint new file mode 100755 index 0000000..f2eb2b0 --- /dev/null +++ b/.bin/golangci-lint @@ -0,0 +1,256 @@ +#!/usr/bin/env bash +# shellcheck disable=SC2155 +# LISENCE: https://github.com/versenv/versenv/blob/HEAD/LICENSE +set -Eeu -o pipefail + +# versenv unique +exe_filename=golangci-lint +env_key_version=GOLANGCI_LINT_VERSION +git_url_prefix=https://github.com/golangci/golangci-lint + +GetProgramLatestStableVersion() { HeadURL "${git_url_prefix:?}/releases/latest" | awk -F"/tag/" "/^[Ll]ocation:/ {print \$2}" | tr -d "[:cntrl:]" | tr -d "^v" | tail -n 1; } +SubcommandGetProgramVersions() { git ls-remote --quiet --refs --tags "${git_url_prefix:?}.git" | grep -Eo "v?[0-9]+\.[0-9]+\.[0-9]+(-[^\"]+)?" | tr -d "^v" | sort -uV; } +SubcommandGetProgramStableVersions() { SubcommandGetProgramVersions | grep -E "[0-9]+\.[0-9]+\.[0-9]+$"; } + +FindTargetZipDownloadURL() { + local prog_version="${1:?}" + local prog_os="${2:?}" + local prog_arch="${3:?}" + local path_suffix && path_suffix=$( + GetURLs "${git_url_prefix:?}/releases/expanded_assets/"{,v}"${prog_version:?}" | grep -Eo "href=\"[^\"]+/v?${prog_version:?}/[^\"]*${prog_os:?}[^\"]*${prog_arch:?}[^\"]*\.(zip|tar|tgz|tar\.gz)" | sed 's/href="//' + ) + echo "https://github.com${path_suffix:?}" +} + +Unzip() { + local source_zip_path="${1:?}" + local filename_in_archive="${2:?}" + local target_file_path="${3:?}" + if [[ "${source_zip_path:?}" =~ .+\.(tar|tgz|tar\.gz)$ ]]; then + local file_in_archive && file_in_archive=$(tar tvf "${source_zip_path:?}" | awk -F" " "/[\/ ]${filename_in_archive:?}$/ {print \$NF}") + RecExec bash -c "tar -O -xf \"${source_zip_path:?}\" \"${file_in_archive:?}\" > \"${target_file_path:?}\"" + elif [[ "${source_zip_path:?}" =~ .+\.zip$ ]]; then + local file_in_archive && file_in_archive=$(unzip -l "${source_zip_path:?}" | awk -F" " "/[\/ ]${filename_in_archive:?}$/ {print \$NF}") + RecExec bash -c "unzip -p -o \"${source_zip_path:?}\" \"${file_in_archive:?}\" > \"${target_file_path:?}\"" + fi +} + +InstallProgram() { + MustFoundCommands tar + Prepare + # vars + local prog_version="${1:?}" + local prog_os && prog_os=$(uname -s | tr '[:upper:]' '[:lower:]') + local machine_arch && machine_arch=$(uname -m) + local prog_arch + if [[ ${machine_arch:?} = x86_64 ]]; then + prog_arch=amd64 + elif [[ ${machine_arch:?} = arm64 ]]; then + prog_arch=arm64 + else + RecCritical "arch (${machine_arch:?}) is not supported" + exit 1 + fi + local download_url && download_url="$(FindTargetZipDownloadURL "${prog_version:?}" "${prog_os:?}" "${prog_arch:?}")" + local downloaded_path && downloaded_path=${prog_version_tmp_dir:?}/$(basename "${download_url:?}") + # download + RecNotice "Download ${download_url:?}" + DownloadURL "${download_url:?}" "${downloaded_path:?}" + # install + RecNotice "Install ${downloaded_path:?} to ${prog_version_exe:?}" + local unziped_path="${prog_version_tmp_dir:?}/${exe_filename:?}" + Unzip "${downloaded_path:?}" "${exe_filename:?}" "${unziped_path:?}" + RecExec mv -f "${unziped_path:?}" "${prog_version_exe:?}" + RecExec chmod +x 
"${prog_version_exe:?}" +} + +ExecProgram() { + Prepare + # install + if [[ ! -x "${prog_version_exe:?}" ]]; then + InstallProgram "${prog_version:?}" + fi + # exec + exec "${prog_version_exe:?}" "$@" <&0 +} + +# LISENCE: https://github.com/kunitsucom/rec.sh/blob/HEAD/LICENSE +# Common +if [ -t 2 ]; then REC_COLOR=true; else REC_COLOR=''; fi +_recRFC3339() { date "+%Y-%m-%dT%H:%M:%S%z" | sed "s/\(..\)$/:\1/"; } +_recCmd() { for a in "$@"; do if echo "${a:-}" | grep -Eq "[[:blank:]]"; then printf "'%s' " "${a:-}"; else printf "%s " "${a:-}"; fi; done | sed "s/ $//"; } +# Color +RecDefault() { test " ${REC_SEVERITY:-0}" -gt 000 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;35m} DEFAULT${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecDebug() { test " ${REC_SEVERITY:-0}" -gt 100 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;34m} DEBUG${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecInfo() { test " ${REC_SEVERITY:-0}" -gt 200 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;32m} INFO${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecNotice() { test " ${REC_SEVERITY:-0}" -gt 300 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;36m} NOTICE${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecWarning() { test " ${REC_SEVERITY:-0}" -gt 400 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;33m} WARNING${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecError() { test " ${REC_SEVERITY:-0}" -gt 500 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;31m} ERROR${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecCritical() { test " ${REC_SEVERITY:-0}" -gt 600 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;1;31m} CRITICAL${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecAlert() { test " ${REC_SEVERITY:-0}" -gt 700 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;41m} ALERT${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecEmergency() { test "${REC_SEVERITY:-0}" -gt 800 2>/dev/null || echo "$*" | awk "{print \"$(_recRFC3339) [${REC_COLOR:+\\033[0;1;41m}EMERGENCY${REC_COLOR:+\\033[0m}] \"\$0\"\"}" 1>&2; } +RecExec() { RecInfo "$ $(_recCmd "$@")" && "$@"; } +RecRun() { _dlm="####R#E#C#D#E#L#I#M#I#T#E#R####" && _all=$({ _out=$("$@") && _rtn=$? || _rtn=$? && printf "\n%s" "${_dlm:?}${_out:-}" && return "${_rtn:-0}"; } 2>&1) && _rtn=$? || _rtn=$? 
&& _dlmno=$(echo "${_all:-}" | sed -n "/${_dlm:?}/=") && _cmd=$(_recCmd "$@") && _stdout=$(echo "${_all:-}" | tail -n +"${_dlmno:-1}" | sed "s/^${_dlm:?}//") && _stderr=$(echo "${_all:-}" | head -n "${_dlmno:-1}" | grep -v "^${_dlm:?}") && RecInfo "$ ${_cmd:-}" && { [ -z "${_stdout:-}" ] || RecInfo "${_stdout:?}"; } && { [ -z "${_stderr:-}" ] || RecWarning "${_stderr:?}"; } && return "${_rtn:-0}"; } + +# versenv common +DownloadURL() { + local url="${1:?}" + local file="${2:?}" + if command -v curl >/dev/null; then + RecExec curl --tlsv1.2 --connect-timeout 2 --progress-bar -fLR "${url:?}" -o "${file:?}" + elif command -v wget >/dev/null; then + RecExec wget --secure-protocol=TLSv1_2 --dns-timeout=2 --connect-timeout=2 -q "${url:?}" -O "${file:?}" + else + RecCritical "command not found: curl or wget" + exit 127 + fi +} + +GetURLs() { + for arg in "$@"; do + local url="${arg:?}" + if command -v wget >/dev/null; then + wget --secure-protocol=TLSv1_2 --dns-timeout=2 --connect-timeout=2 -q -O- "${url:?}" + elif command -v curl >/dev/null; then + curl --tlsv1.2 --connect-timeout 2 -fLRSs "${url:?}" + else + RecCritical "command not found: curl or wget" + exit 127 + fi + done +} + +HeadURL() { + local url="${1:?}" + if command -v wget >/dev/null; then + LC_ALL=C wget --secure-protocol=TLSv1_2 --dns-timeout=2 --connect-timeout=2 -S --spider --max-redirect=0 "${url:?}" -O /dev/null 2>&1 | awk -F" " "/ / {print \$2}" + elif command -v curl >/dev/null; then + curl --tlsv1.2 --connect-timeout 2 -fIRSs "${url:?}" + else + RecCritical "command not found: curl or wget" + exit 127 + fi +} + +SubcommandSelfUpdate() { + local self_update_url="https://raw.githubusercontent.com/versenv/versenv/HEAD/bin/${exe_filename:?}" + local script_file_path="${0:?}" + local tmp_dir=/tmp/versenv/bin + local tmp_file=${tmp_dir:?}/${exe_filename:?} + local backup_file="${tmp_file:?}.backup" + RecNotice "Download ${self_update_url:?}" + mkdir -p "${tmp_dir:?}" + DownloadURL "${self_update_url:?}" "${tmp_file:?}" + RecNotice "Take backup ${script_file_path:?} to ${backup_file:?}" + RecExec mv -f "${script_file_path:?}" "${backup_file:?}" + RecNotice "Show the changes between old and new" + RecExec diff -u "${backup_file:?}" "${tmp_file:?}" || true + RecNotice "Update ${script_file_path:?} to ${self_update_url:?}" + RecExec chmod +x "${tmp_file:?}" + RecExec mv -f "${tmp_file:?}" "${script_file_path:?}" +} + +MustFoundCommands() { + # shellcheck disable=SC2207 + local not_found_commands=($( + for cmd in "$@"; do + if ! command -v "${cmd-}" 1>/dev/null; then + echo "${cmd-}" + fi + done + )) + if [[ "${#not_found_commands[@]}" -eq 0 ]]; then + return + fi + RecCritical "command not found: ${not_found_commands[*]}" + exit 127 +} + +ResolveProgramVersion() { + local version && version=$( + if [[ ${!env_key_version:-} = latest ]] || [[ ${!env_key_version:-} = stable ]]; then + GetProgramLatestStableVersion + elif [[ ${!env_key_version:-} ]]; then + echo "${!env_key_version:?}" + else + ver=$(GetProgramLatestStableVersion) + RecNotice "env ${env_key_version:?} is not set. 
Use latest stable version: ${env_key_version:?}=${ver:-"?"}" + echo "${ver:-}" + fi + ) + if [[ "${version:-}" ]]; then + echo "${version:?}" + return 0 + else + RecCritical "Failed to resolve version" + return 1 + fi +} + +Prepare() { + if [[ ${vers_prepared:-} = "${exe_filename:?}" ]]; then + return 0 + else + vers_prepared="${exe_filename:?}" + fi + # common vars + cache_dir=~/.cache/versenv + # vars + prog_version="$(ResolveProgramVersion)" + prog_versions_dir="${cache_dir:?}/${exe_filename:?}" + prog_version_dir="${prog_versions_dir:?}/${prog_version:?}" + prog_version_tmp_dir="${prog_version_dir:?}/tmp" + prog_version_bin_dir="${prog_version_dir:?}/bin" + prog_version_exe="${prog_version_bin_dir:?}/${exe_filename:?}" + # Create directories in idempotent + mkdir -p "${prog_version_tmp_dir:?}" "${prog_version_bin_dir:?}" +} + +Usage() { + cat < + +## Ticket / Issue Number + +> **Note** +> *Please fill in the ticket or issue number.* +> > Example: +> > +> > #3 + +## What's changed + +> **Note** +> *Please explain what changes this pull request will make.* +> > Example: +> > +> > * Added functionality to perform 'bar' on 'foo'. + +## Check List + +- [ ] Assign labels +- [ ] Add appropriate test cases + +## Remark + +> **Note** +> *Please provide additional remarks if necessary.* + + diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 0000000..d60acb1 --- /dev/null +++ b/.github/release.yml @@ -0,0 +1,38 @@ +# https://docs.github.com/ja/repositories/releasing-projects-on-github/automatically-generated-release-notes +changelog: + categories: + - title: "💥 BREAKING CHANGES" + labels: + - BREAKING CHANGE + - title: "⬆️ Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)" + labels: + - build + - dependencies + - title: "👷 Changes to our CI configuration files and scripts (examples: CircleCi, SauceLabs)" + labels: + - ci + - title: "📝 Documentation only changes" + labels: + - docs + - title: "✨ A new feature" + labels: + - feat + - title: "🐛 A bug fix" + labels: + - fix + - title: "⚡️ A code change that improves performance" + labels: + - perf + - title: "♻️ A code change that neither fixes a bug nor adds a feature" + labels: + - refactor + - title: "✅ Adding missing tests or correcting existing tests" + labels: + - test + - title: "🚨 Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)" + labels: + - style + - title: "🧑‍💻 Changes to the build process or auxiliary tools and libraries such as documentation generation" + labels: + - chore + - "*" diff --git a/.github/workflows/auto-assign.yml b/.github/workflows/auto-assign.yml new file mode 100644 index 0000000..54da72f --- /dev/null +++ b/.github/workflows/auto-assign.yml @@ -0,0 +1,24 @@ +# ref. 
https://zenn.dev/snowcait/articles/d6bc5eafd8ab75
+name: 'Auto Assign'
+
+on:
+  pull_request:
+    types:
+      - opened
+      - ready_for_review
+      - reopened
+
+jobs:
+  assign:
+    name: Auto Assign
+    if: github.actor != 'dependabot[bot]'
+    runs-on: ubuntu-latest
+    timeout-minutes: 2
+    steps:
+      - if: ${{ toJSON(github.event.pull_request.assignees) == '[]' }}
+        run: gh pr edit "${NUMBER}" --add-assignee "${ASSIGNEE}"
+        env:
+          GH_TOKEN: ${{ github.token }}
+          GH_REPO: ${{ github.repository }}
+          NUMBER: ${{ github.event.pull_request.number }}
+          ASSIGNEE: ${{ github.event.pull_request.user.login }}
diff --git a/.github/workflows/go-lint.yml b/.github/workflows/go-lint.yml
new file mode 100644
index 0000000..b089569
--- /dev/null
+++ b/.github/workflows/go-lint.yml
@@ -0,0 +1,124 @@
+name: go-lint
+# ^^^^^^^
+# https://github.com/organization/repository/workflows/go-lint/badge.svg
+# ^^^^^^^
+
+on:
+  push:
+    branches:
+      - main
+    paths-ignore:
+      - '.github/dependabot.yml'
+      - '.github/pull_request_template.md'
+      - '.github/release.yml'
+      - '.github/workflows/label-checker.yml'
+      - '.github/workflows/task-list-checker.yml'
+      - '**.md'
+  pull_request:
+    paths-ignore:
+      - '.github/dependabot.yml'
+      - '.github/pull_request_template.md'
+      - '.github/release.yml'
+      - '.github/workflows/label-checker.yml'
+      - '.github/workflows/task-list-checker.yml'
+      - '**.md'
+  workflow_dispatch:
+    inputs: {}
+
+# NOTE: If commit & push continuously, cancel the workflow other than the latest commit.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.base_ref }}-${{ github.head_ref }}-${{ github.ref }}
+  cancel-in-progress: true
+
+permissions:
+  id-token: write
+  contents: read
+
+env:
+  WORKDIR: .
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  go-lint: # NOTE: for Branch protection rule `Status checks that are required.`
+    name: Run golangci-lint
+    runs-on: ubuntu-latest # ref. https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idruns-on
+    steps:
+      - uses: actions/checkout@v4
+      - name: DEBUG
+        shell: bash
+        run: |
+          cat <<'DEBUG_DOC'
+          == DEBUG =======================================================
+          github.ref: ${{ github.ref }}
+          github.event_name: ${{ github.event_name }}
+          -- toJSON(github.event.inputs) ---------------------------------
+          ${{ toJSON(github.event.inputs) }}
+          -- toJSON(github) ----------------------------------------------
+          ${{ toJSON(github) }}
+          ================================================================
+          DEBUG_DOC
+      - name: actions/cache for versenv
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cache/versenv
+          key: versenv-${{ runner.os }}-${{ hashFiles('**/.versenv.env') }}
+          restore-keys: |
+            versenv-${{ runner.os }}-
+      - name: Add GITHUB_PATH, GITHUB_ENV
+        shell: bash
+        run: |
+          # Update GITHUB_PATH
+          cat <<GITHUB_PATH >> $GITHUB_PATH
+          ${PWD}/.local/bin
+          ${PWD}/${{ env.WORKDIR }}/.local/bin
+          ${PWD}/.bin
+          GITHUB_PATH
+          # Update GITHUB_ENV
+          grep -Ev '^\s*$|^\s*#' .versenv.env >> $GITHUB_ENV
+      - name: Setup versenv
+        shell: bash
+        run: |
+          # Setup versenv
+          direnv allow ${{ env.WORKDIR }}
+          make versenv
+      - uses: actions/setup-go@v4 # ref.
https://github.com/actions/setup-go#usage + id: setup-go + with: + cache: false + go-version-file: ${{ env.WORKDIR }}/go.mod + - name: Get Golang info + id: golang-info + shell: bash + run: | + echo "GOVERSION=$(go version | cut -d' ' -f3)" >> "$GITHUB_OUTPUT" + echo "GOCACHE=$(go env GOCACHE)" >> "$GITHUB_OUTPUT" + - name: actions/cache for go + uses: actions/cache@v3 + with: + path: | + ~/go/pkg/mod + ${{ steps.golang-info.outputs.GOCACHE }} + key: ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-${{ hashFiles('**/go.sum') }}-${{ hashFiles('**/*.go') }} + restore-keys: | + ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-${{ hashFiles('**/go.sum') }}- + ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}- + ${{ runner.os }}-go- + - name: Setup git config for go mod download + env: + DEBIAN_FRONTEND: noninteractive + # GITHUB_TOKEN: ${{ secrets.GH_BOT_TOKEN }} + working-directory: ${{ env.WORKDIR }} + shell: bash + run: | + set -Eeu -o pipefail -x + direnv allow . + direnv exec . bash -Eeux -o pipefail -c 'echo "${GOPRIVATE:-}${GOPRIVATE+,}" | while read -d , -r LINE; do echo "set git config: ${LINE}"; git config --global url."https://${GITHUB_TOKEN}:x-oauth-basic@${LINE}".insteadOf "https://${LINE}"; done' + - uses: golangci/golangci-lint-action@v3.7.0 # ref. https://github.com/golangci/golangci-lint-action#how-to-use + with: + working-directory: ${{ env.WORKDIR }} + args: --timeout=600s + version: latest # or v${{ env.GOLANGCI_LINT_VERSION }} diff --git a/.github/workflows/go-mod-tidy.yml b/.github/workflows/go-mod-tidy.yml new file mode 100644 index 0000000..9707747 --- /dev/null +++ b/.github/workflows/go-mod-tidy.yml @@ -0,0 +1,132 @@ +name: go-mod-tidy + +on: + pull_request: + types: + - synchronize + paths-ignore: + - '.github/dependabot.yml' + - '.github/pull_request_template.md' + - '.github/release.yml' + - '.github/workflows/label-checker.yml' + - '.github/workflows/task-list-checker.yml' + - '**.md' + workflow_dispatch: + inputs: {} + +# NOTE: If commit & push continuously, cancel the workflow other than the latest commit. +concurrency: + group: ${{ github.workflow }}-${{ github.base_ref }}-${{ github.head_ref }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + id-token: write + contents: read + +env: + WORKDIR: . + +defaults: + run: + shell: bash + +jobs: + go-mod-tidy: # NOTE: for Branch protection rule `Status checks that are required.` + name: Run go mod tidy + permissions: + id-token: write + contents: write + runs-on: ubuntu-latest # ref. 
https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idruns-on
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.ref }} # needed for gh pr view
+      - name: DEBUG
+        shell: bash
+        run: |
+          cat <<'DEBUG_DOC'
+          == DEBUG =======================================================
+          github.ref: ${{ github.ref }}
+          github.event_name: ${{ github.event_name }}
+          -- toJSON(github.event.inputs) ---------------------------------
+          ${{ toJSON(github.event.inputs) }}
+          -- toJSON(github) ----------------------------------------------
+          ${{ toJSON(github) }}
+          ================================================================
+          DEBUG_DOC
+      - name: actions/cache for versenv
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cache/versenv
+          key: versenv-${{ runner.os }}-${{ hashFiles('**/.versenv.env') }}
+          restore-keys: |
+            versenv-${{ runner.os }}-
+      - name: Add GITHUB_PATH, GITHUB_ENV
+        shell: bash
+        run: |
+          # Update GITHUB_PATH
+          cat <<GITHUB_PATH >> $GITHUB_PATH
+          ${PWD}/.local/bin
+          ${PWD}/${{ env.WORKDIR }}/.local/bin
+          ${PWD}/.bin
+          GITHUB_PATH
+          # Update GITHUB_ENV
+          grep -Ev '^\s*$|^\s*#' .versenv.env >> $GITHUB_ENV
+      - name: Setup versenv
+        shell: bash
+        run: |
+          # Setup versenv
+          direnv allow ${{ env.WORKDIR }}
+          make versenv
+      - uses: actions/setup-go@v4 # ref. https://github.com/actions/setup-go#usage
+        id: setup-go
+        with:
+          cache: false
+          go-version-file: ${{ env.WORKDIR }}/go.mod
+      - name: Get Golang info
+        id: golang-info
+        shell: bash
+        run: |
+          echo "GOVERSION=$(go version | cut -d' ' -f3)" >> "$GITHUB_OUTPUT"
+          echo "GOCACHE=$(go env GOCACHE)" >> "$GITHUB_OUTPUT"
+      - name: actions/cache for go
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/go/pkg/mod
+            ${{ steps.golang-info.outputs.GOCACHE }}
+          key: ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-${{ hashFiles('**/go.sum') }}-${{ hashFiles('**/*.go') }}
+          restore-keys: |
+            ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-${{ hashFiles('**/go.sum') }}-
+            ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-
+            ${{ runner.os }}-go-
+      - name: Setup git config for go mod download
+        env:
+          DEBIAN_FRONTEND: noninteractive
+          # GITHUB_TOKEN: ${{ secrets.GH_BOT_TOKEN }}
+        working-directory: ${{ env.WORKDIR }}
+        shell: bash
+        run: |
+          set -Eeu -o pipefail -x
+          direnv allow .
+          direnv exec . bash -Eeux -o pipefail -c 'echo "${GOPRIVATE:-}${GOPRIVATE+,}" | while read -d , -r LINE; do echo "set git config: ${LINE}"; git config --global url."https://${GITHUB_TOKEN}:x-oauth-basic@${LINE}".insteadOf "https://${LINE}"; done'
+      - name: Run go mod tidy
+        if: ${{ steps.setup-go.outputs.cache-hit != 'true' }}
+        env:
+          DEBIAN_FRONTEND: noninteractive
+          # GITHUB_TOKEN: ${{ secrets.GH_BOT_TOKEN }}
+        working-directory: ${{ env.WORKDIR }}
+        shell: bash
+        run: |
+          direnv exec . go-mod-tidy-all
+          git config --local user.name "github-actions[bot]"
+          git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
+          DIFF_FILES=$(git diff --name-only)
+          if [ -z "${DIFF_FILES-}" ]; then
+            echo "No changes to the output on this push; exiting."
+            exit 0
+          fi
+          echo "${DIFF_FILES:?}" | grep -E "go\.(mod|sum)" | xargs -t git add
+          git commit -m "build: go mod tidy (by github-actions[bot])"
+          git push
diff --git a/.github/workflows/go-test.yml b/.github/workflows/go-test.yml
new file mode 100644
index 0000000..bcdad1d
--- /dev/null
+++ b/.github/workflows/go-test.yml
@@ -0,0 +1,144 @@
+name: go-test
+# ^^^^^^^
+# https://github.com/organization/repository/workflows/go-test/badge.svg
+# ^^^^^^^
+
+on:
+  push:
+    branches:
+      - main
+    paths-ignore:
+      - '.github/dependabot.yml'
+      - '.github/pull_request_template.md'
+      - '.github/release.yml'
+      - '.github/workflows/label-checker.yml'
+      - '.github/workflows/task-list-checker.yml'
+      - '**.md'
+  pull_request:
+    paths-ignore:
+      - '.github/dependabot.yml'
+      - '.github/pull_request_template.md'
+      - '.github/release.yml'
+      - '.github/workflows/label-checker.yml'
+      - '.github/workflows/task-list-checker.yml'
+      - '**.md'
+  workflow_dispatch:
+    inputs: {}
+
+# NOTE: If commit & push continuously, cancel the workflow other than the latest commit.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.base_ref }}-${{ github.head_ref }}-${{ github.ref }}
+  cancel-in-progress: true
+
+permissions:
+  id-token: write
+  contents: read
+
+env:
+  DOCKER_BUILD_CACHE_FROM: /tmp/.docker-build-buildx-cache-from
+  DOCKER_BUILD_CACHE_TO: /tmp/.docker-build-buildx-cache-to
+  WORKDIR: .
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  go-test: # NOTE: for Branch protection rule `Status checks that are required.`
+    name: Run go test
+    runs-on: ubuntu-latest # ref. https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idruns-on
+    steps:
+      - uses: actions/checkout@v4
+      - name: DEBUG
+        shell: bash
+        run: |
+          cat <<'DEBUG_DOC'
+          == DEBUG =======================================================
+          github.ref: ${{ github.ref }}
+          github.event_name: ${{ github.event_name }}
+          -- toJSON(github.event.inputs) ---------------------------------
+          ${{ toJSON(github.event.inputs) }}
+          -- toJSON(github) ----------------------------------------------
+          ${{ toJSON(github) }}
+          ================================================================
+          DEBUG_DOC
+      - name: actions/cache for versenv
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cache/versenv
+          key: versenv-${{ runner.os }}-${{ hashFiles('**/.versenv.env') }}
+          restore-keys: |
+            versenv-${{ runner.os }}-
+      - name: Add GITHUB_PATH, GITHUB_ENV
+        run: |
+          # Update GITHUB_PATH
+          cat <<GITHUB_PATH >> $GITHUB_PATH
+          ${PWD}/.local/bin
+          ${PWD}/${{ env.WORKDIR }}/.local/bin
+          ${PWD}/.bin
+          GITHUB_PATH
+          # Update GITHUB_ENV
+          grep -Ev '^\s*$|^\s*#' .versenv.env >> $GITHUB_ENV
+      - name: Setup versenv
+        run: |
+          # Setup versenv
+          direnv allow ${{ env.WORKDIR }}
+          make versenv
+      - uses: actions/setup-go@v4 # ref.
https://github.com/actions/setup-go#usage + id: setup-go + with: + cache: false + go-version-file: ${{ env.WORKDIR }}/go.mod + - name: Get Golang info + id: golang-info + shell: bash + run: | + echo "GOVERSION=$(go version | cut -d' ' -f3)" >> "$GITHUB_OUTPUT" + echo "GOCACHE=$(go env GOCACHE)" >> "$GITHUB_OUTPUT" + - name: actions/cache for go + uses: actions/cache@v3 + with: + path: | + ~/go/pkg/mod + ${{ steps.golang-info.outputs.GOCACHE }} + key: ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-${{ hashFiles('**/go.sum') }}-${{ hashFiles('**/*.go') }} + restore-keys: | + ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-${{ hashFiles('**/go.sum') }}- + ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}- + ${{ runner.os }}-go- + # MEMO: Enable docker compose layer cache from here + - name: Set up Buildx for docker build cache + uses: docker/setup-buildx-action@v3 + - name: Use docker build cache + uses: actions/cache@v3 + with: + path: ${{ env.DOCKER_BUILD_CACHE_FROM }} + key: docker-build-buildx-${{ github.sha }} + restore-keys: | + docker-build-buildx- + # MEMO: Enable docker compose layer cache until here + - name: Run go test + env: + DEBIAN_FRONTEND: noninteractive + # for docker build cache + GHA_CACHE_OPTS: --cache-from type=local,src=${{ env.DOCKER_BUILD_CACHE_FROM }} --cache-to type=local,dest=${{ env.DOCKER_BUILD_CACHE_TO }},mode=max + # GITHUB_TOKEN: ${{ secrets.GH_BOT_TOKEN }} + working-directory: ${{ env.WORKDIR }} + run: | + set -Eeux -o pipefail + direnv allow . + direnv exec . bash -Eeux -o pipefail -c 'echo "${GOPRIVATE:-}${GOPRIVATE+,}" | while read -d , -r LINE; do echo "set git config: ${LINE}"; git config --global url."https://${GITHUB_TOKEN}:x-oauth-basic@${LINE}".insteadOf "https://${LINE}"; done' + direnv exec . make test + - uses: codecov/codecov-action@v3 # ref. https://github.com/codecov/codecov-action#example-workflowyml-with-codecov-action + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: ${{ env.WORKDIR }}/coverage.txt + - name: Move docker build cache (workaround) + if: always() + run: | + if [[ -d ${{ env.DOCKER_BUILD_CACHE_TO }} ]]; then + rm -rf ${{ env.DOCKER_BUILD_CACHE_FROM }} + mv ${{ env.DOCKER_BUILD_CACHE_TO }} ${{ env.DOCKER_BUILD_CACHE_FROM }} + fi + shell: bash diff --git a/.github/workflows/go-vuln.yml b/.github/workflows/go-vuln.yml new file mode 100644 index 0000000..c292f9d --- /dev/null +++ b/.github/workflows/go-vuln.yml @@ -0,0 +1,117 @@ +name: go-vuln +# ^^^^^^^ +# https://github.com/organization/repository/workflows/go-vuln/badge.svg +# ^^^^^^^ + +on: + push: + branches: + - main + paths-ignore: + - '.github/dependabot.yml' + - '.github/pull_request_template.md' + - '.github/release.yml' + - '.github/workflows/label-checker.yml' + - '.github/workflows/task-list-checker.yml' + - '**.md' + pull_request: + paths-ignore: + - '.github/dependabot.yml' + - '.github/pull_request_template.md' + - '.github/release.yml' + - '.github/workflows/label-checker.yml' + - '.github/workflows/task-list-checker.yml' + - '**.md' + workflow_dispatch: + inputs: {} + +# NOTE: If commit & push continuously, cancel the workflow other than the latest commit. +concurrency: + group: ${{ github.workflow }}-${{ github.base_ref }}-${{ github.head_ref }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + id-token: write + contents: read + +env: + WORKDIR: . 
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  go-vuln: # NOTE: for Branch protection rule `Status checks that are required.`
+    name: Check Golang vulnerability
+    runs-on: ubuntu-latest # ref. https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idruns-on
+    steps:
+      - uses: actions/checkout@v4
+      - name: DEBUG
+        shell: bash
+        run: |
+          cat <<'DEBUG_DOC'
+          == DEBUG =======================================================
+          github.ref: ${{ github.ref }}
+          github.ref_name: ${{ github.ref_name }}
+          github.event_name: ${{ github.event_name }}
+          -- toJSON(github.event.inputs) ---------------------------------
+          ${{ toJSON(github.event.inputs) }}
+          -- toJSON(github) ----------------------------------------------
+          ${{ toJSON(github) }}
+          ================================================================
+          DEBUG_DOC
+      - name: actions/cache for versenv
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cache/versenv
+          key: versenv-${{ runner.os }}-${{ hashFiles('**/.versenv.env') }}
+          restore-keys: |
+            versenv-${{ runner.os }}-
+      - name: Add GITHUB_PATH, GITHUB_ENV
+        shell: bash
+        run: |
+          # Update GITHUB_PATH
+          cat <<GITHUB_PATH >> $GITHUB_PATH
+          ${PWD}/.local/bin
+          ${PWD}/${{ env.WORKDIR }}/.local/bin
+          ${PWD}/.bin
+          GITHUB_PATH
+          # Update GITHUB_ENV
+          grep -Ev '^\s*$|^\s*#' .versenv.env >> $GITHUB_ENV
+      - name: Setup versenv
+        shell: bash
+        run: |
+          # Setup versenv
+          direnv allow ${{ env.WORKDIR }}
+          make versenv
+      - uses: actions/setup-go@v4 # ref. https://github.com/actions/setup-go#usage
+        id: setup-go
+        with:
+          cache: false
+          go-version-file: ${{ env.WORKDIR }}/go.mod
+      - name: Get Golang info
+        id: golang-info
+        shell: bash
+        run: |
+          echo "GOVERSION=$(go version | cut -d' ' -f3)" >> "$GITHUB_OUTPUT"
+          echo "GOCACHE=$(go env GOCACHE)" >> "$GITHUB_OUTPUT"
+      - name: actions/cache for go
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/go/pkg/mod
+            ${{ steps.golang-info.outputs.GOCACHE }}
+          key: ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-${{ hashFiles('**/go.sum') }}-${{ hashFiles('**/*.go') }}
+          restore-keys: |
+            ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-${{ hashFiles('**/go.sum') }}-
+            ${{ runner.os }}-go-${{ steps.golang-info.outputs.GOVERSION }}-
+            ${{ runner.os }}-go-
+      - id: govulncheck
+        uses: golang/govulncheck-action@v1
+        with:
+          work-dir: ${{ env.WORKDIR }}
+          go-version-file: ${{ env.WORKDIR }}/go.mod
+          go-package: ./...
+ repo-checkout: false diff --git a/.github/workflows/label-checker.yml b/.github/workflows/label-checker.yml new file mode 100644 index 0000000..7e44dd0 --- /dev/null +++ b/.github/workflows/label-checker.yml @@ -0,0 +1,78 @@ +name: label-checker + +# Dependabot +# gh label create --force "dependencies" --description "Pull requests that update a dependency file" --color 0366D6 +# gh label create --force "go" --description "Pull requests that update Go code" --color 16E2E2 +# gh label create --force "github_actions" --description "Pull requests that update GitHub Actions code" --color 000000 +# gh label create --force "docker" --description "Pull requests that update Docker code" --color 21CEFF + +# Commit prefix +# gh label create --force "BREAKING CHANGE" --description "BREAKING CHANGES" --color FF0303 +# gh label create --force "build" --description "Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)" --color 5319E7 +# gh label create --force "ci" --description "Changes to our CI configuration files and scripts (examples: CircleCi, SauceLabs)" --color 53C4EE +# gh label create --force "docs" --description "Documentation only changes" --color 1B3E44 +# gh label create --force "feat" --description "A new feature" --color 0EAA80 +# gh label create --force "fix" --description "A bug fix" --color 1D76DB +# gh label create --force "perf" --description "A code change that improves performance" --color A2EEEF +# gh label create --force "refactor" --description "A code change that neither fixes a bug nor adds a feature" --color C5DEF5 +# gh label create --force "test" --description "Adding missing tests or correcting existing tests" --color 1D76DB +# gh label create --force "chore" --description "Changes to the build process or auxiliary tools and libraries such as documentation generation" --color 20313F + +on: + pull_request: + types: + - opened + - edited + - labeled + - unlabeled + - ready_for_review + - reopened + - synchronize + +env: + LABELS: "BREAKING CHANGE,build,ci,docs,feat,fix,perf,refactor,test,chore" + +jobs: + check: + name: Check labels + runs-on: ubuntu-latest + timeout-minutes: 10 + permissions: + id-token: write + contents: read + pull-requests: write # required to add labels + statuses: read + checks: read + repository-projects: read + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.ref }} # needed for gh pr view + - name: Check labels + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + shell: bash + run: | + # Check labels + set -Eeuo pipefail -x + + # If any of the labels are present, exit with success + if [[ -n "$(gh pr view --json labels --jq ".labels[].name" | grep -E "^($(tr , "|" <<<"${LABELS:?}"))$")" ]]; then + exit 0 + fi + + # Add labels based on PR title + GH_PR_TITLE=$(gh pr view --json title --jq .title) + gh label list --json name --jq ".[].name" | while read -r LINE; do + awk -F: "/^${LINE-}(\([^\)]+\))?:/ {print \$1}" <<<"${GH_PR_TITLE:?}" | grep -Eo "^${LINE:?}" || true # NOTE: Ignore the return value of grep because we just want to output the string + done | xargs -t -I{} gh pr edit --add-label {} + + # If any of the labels are present, exit with success + while read -r LINE; do + if grep -E "^($(tr , "|" <<<"${LABELS:?}"))$" <<<"${LINE-}"; then + exit 0 + fi + done <<<"$(gh pr view --json labels --jq ".labels[].name")" + + # If none of the labels are present, exit with error + exit 1 diff --git a/.github/workflows/task-list-checker.yml 
b/.github/workflows/task-list-checker.yml new file mode 100644 index 0000000..74bcaa0 --- /dev/null +++ b/.github/workflows/task-list-checker.yml @@ -0,0 +1,29 @@ +name: task-list-checker + +on: + pull_request: + types: + - opened + - edited + - ready_for_review + - reopened + - synchronize + +jobs: + check: + name: Check task list + runs-on: ubuntu-latest + timeout-minutes: 10 + permissions: + id-token: write + contents: read + pull-requests: read + statuses: write + defaults: + run: + shell: bash + steps: + - name: Check for incomplete task list items + uses: Shopify/task-list-checker@7cab3e5a969a34f8c553974bc6f8940d8c3978de # ref. https://github.com/Shopify/task-list-checker + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2dd2a12 --- /dev/null +++ b/.gitignore @@ -0,0 +1,23 @@ +# common +.DS_Store +Thumbs.db +*.swp +.*.swp +:* +*~ +.*~ +**/.env +/.local/ +/.tmp/ +/tmp/ +*.mp4 + +# go +/cmd/sandbox/** + +# codecov +coverage.txt* +coverage.html + +# unique +/arcgen diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000..52d0f0d --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,62 @@ +run: + skip-files: + - cmd/sandbox/main.go + - internal/integrationtest/samples/.*\.go + modules-download-mode: readonly + allow-parallel-runners: true + +# https://golangci-lint.run/usage/linters/ +linters: + enable-all: true + disable: + - depguard # unnecessary + - dupl # disable to avoid encouraging excessive DRY + - exhaustruct # https://github.com/GaijinEntertainment/go-exhaustruct + - exhaustivestruct # https://github.com/mbilski/exhaustivestruct + - gci # unnecessary + - godox # unnecessary + - golint # deprecated https://github.com/golang/lint + - gomnd # https://github.com/tommy-muehle/go-mnd + - gosmopolitan # unnecessary + - ifshort # for readability + - interfacebloat # unnecessary + - interfacer # deprecated https://github.com/mvdan/interfacer + - lll # unnecessary + - maligned # deprecated https://github.com/mdempsky/maligned + - nlreturn # ignore "return with no blank line before" + - nolintlint # unnecessary + - nonamedreturns # unnecessary + - nosnakecase # for environment variable key + - varnamelen # unnecessary + - wsl # ignore "declarations should never be cuddled" + +linters-settings: + godot: + period: false + goimports: + local-prefixes: "github.com/kunitsucom/arcgen" + wrapcheck: + ignoreSigs: + - func errors.New(text string) error + - func fmt.Errorf(format string, a ...any) error + - func fmt.Errorf(format string, a ...interface{}) error + - func github.com/kunitsucom/util.go/errors.Errorf(format string, a ...interface{}) error + + +issues: + exclude-rules: + - path: _test\.go + linters: + - containedctx + - cyclop + - dupl + - funlen + - gochecknoglobals + - gocognit + - goconst + - goerr113 + - ireturn + - maintidx + - noctx + - varnamelen + - wrapcheck diff --git a/.versenv.env b/.versenv.env new file mode 100644 index 0000000..05f2813 --- /dev/null +++ b/.versenv.env @@ -0,0 +1,8 @@ +# NOTE: Define environment variables for https://github.com/versenv/versenv + +# https://github.com/direnv/direnv/releases +DIRENV_VERSION=2.32.3 + +# https://github.com/golangci/golangci-lint/releases +# GOLANGCI_LINT_VERSION=latest +GOLANGCI_LINT_VERSION=latest diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..a49f4fa --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, 
REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022 kunitsucom + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..26b9666 --- /dev/null +++ b/Makefile @@ -0,0 +1,95 @@ +export SHELL := /usr/bin/env bash -Eeu -o pipefail +export REPO_ROOT := $(shell git rev-parse --show-toplevel || exit 1) +export REPO_LOCAL_DIR := ${REPO_ROOT}/.local +export PATH := ${REPO_LOCAL_DIR}/bin:${REPO_ROOT}/.bin:${PATH} +export REPO_TMP_DIR := ${REPO_ROOT}/.tmp +export PRE_PUSH := ${REPO_ROOT}/.git/hooks/pre-push +export GIT_TAG_LATEST := $(shell git describe --tags --abbrev=0) +export GIT_BRANCH_CURRENT := $(shell git rev-parse --abbrev-ref HEAD) +export GO_MODULE_NAME := github.com/kunitsucom/arcgen +export BUILD_VERSION := $(shell git describe --tags --exact-match HEAD 2>/dev/null || git rev-parse --short HEAD) +export BUILD_REVISION := $(shell git rev-parse HEAD) +export BUILD_BRANCH := $(shell git rev-parse --abbrev-ref HEAD | tr / -) +export BUILD_TIMESTAMP := $(shell git log -n 1 --format='%cI') + +.DEFAULT_GOAL := help +.PHONY: help +help: githooks ## display this help document + @grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-40s\033[0m %s\n", $$1, $$2}' + +.PHONY: setup +setup: githooks ## Setup tools for development + # == SETUP ===================================================== + # versenv + make versenv + # -------------------------------------------------------------- + +.PHONY: versenv +versenv: + # direnv + direnv allow . + # golangci-lint + golangci-lint --version + +.PHONY: githooks +githooks: + @[[ -f "${PRE_PUSH}" ]] || cp -ai "${REPO_ROOT}/.githooks/pre-push" "${PRE_PUSH}" + +clean: ## Clean up cache, etc + go clean -x -cache -testcache -modcache -fuzzcache + golangci-lint cache clean + +.PHONY: lint +lint: ## Run secretlint, go mod tidy, golangci-lint + # ref. https://github.com/secretlint/secretlint + docker run -v `pwd`:`pwd` -w `pwd` --rm secretlint/secretlint secretlint "**/*" + # tidy + go mod tidy + git diff --exit-code go.mod go.sum + # lint + # ref. https://golangci-lint.run/usage/linters/ + golangci-lint run --fix --sort-results + git diff --exit-code + +.PHONY: credits +credits: ## Generate CREDITS file + command -v gocredits || go install github.com/Songmu/gocredits/cmd/gocredits@latest + gocredits -skip-missing . > CREDITS + git diff --exit-code + +.PHONY: test +test: githooks ## Run go test and display coverage + # test + go test -v -race -p=4 -parallel=8 -timeout=300s -cover -coverprofile=./coverage.txt ./... + go tool cover -func=./coverage.txt + +.PHONY: ci +ci: lint credits test ## CI command set + +.PHONY: act-check +act-check: + @if !
command -v act >/dev/null 2>&1; then \ + printf "\033[31;1m%s\033[0m\n" "act is not installed: brew install act" 1>&2; \ + exit 1; \ + fi + +.PHONY: act-go-lint +act-go-lint: act-check ## Run go-lint workflow in act + act pull_request --container-architecture linux/amd64 -P ubuntu-latest=catthehacker/ubuntu:act-latest -W .github/workflows/go-lint.yml + +.PHONY: act-go-test +act-go-test: act-check ## Run go-test workflow in act + act pull_request --container-architecture linux/amd64 -P ubuntu-latest=catthehacker/ubuntu:act-latest -W .github/workflows/go-test.yml + +.PHONY: act-go-vuln +act-go-vuln: act-check ## Run go-vuln workflow in act + act pull_request --container-architecture linux/amd64 -P ubuntu-latest=catthehacker/ubuntu:act-latest -W .github/workflows/go-vuln.yml + +.PHONY: release +release: ci ## Run goxz and gh release upload + @command -v goxz >/dev/null || go install github.com/Songmu/goxz/cmd/goxz@latest + git checkout main + git checkout "${GIT_TAG_LATEST}" + -goxz -d "${REPO_TMP_DIR}" -os=linux,darwin,windows -arch=amd64,arm64 -pv "`git describe --tags --abbrev=0`" -trimpath -build-ldflags "-s -w -X ${GO_MODULE_NAME}/internal/config.version=`git describe --tags --abbrev=0` -X ${GO_MODULE_NAME}/internal/config.revision=`git rev-parse HEAD` -X ${GO_MODULE_NAME}/internal/config.branch=`git rev-parse --abbrev-ref HEAD` -X ${GO_MODULE_NAME}/internal/config.timestamp=`git log -n 1 --format='%cI'`" ./cmd/arcgen + -gh release upload "`git describe --tags --abbrev=0`" "${REPO_TMP_DIR}"/*"`git describe --tags --abbrev=0`"* + git checkout "${GIT_BRANCH_CURRENT}" diff --git a/cmd/arcgen/main.go b/cmd/arcgen/main.go new file mode 100644 index 0000000..91079e7 --- /dev/null +++ b/cmd/arcgen/main.go @@ -0,0 +1,16 @@ +package main + +import ( + "context" + "log" + + "github.com/kunitsucom/arcgen/pkg/arcgen" +) + +func main() { + ctx := context.Background() + + if err := arcgen.ARCGen(ctx); err != nil { + log.Fatalf("arcgen: %+v", err) + } +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..68d0cac --- /dev/null +++ b/go.mod @@ -0,0 +1,5 @@ +module github.com/kunitsucom/arcgen + +go 1.21.4 + +require github.com/kunitsucom/util.go v0.0.59-rc.6 diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..9f8ecef --- /dev/null +++ b/go.sum @@ -0,0 +1,2 @@ +github.com/kunitsucom/util.go v0.0.59-rc.6 h1:QJkGrG4ZlzbLcYCkbdOoUuJjFnCgUKfT7/58aeFVNbc= +github.com/kunitsucom/util.go v0.0.59-rc.6/go.mod h1:bYFf2JvRqVF1brBtpdt3xkkTGJBxmYBxZlItrc/lf7Y= diff --git a/internal/arcgen/lang/go/dump_source.go b/internal/arcgen/lang/go/dump_source.go new file mode 100644 index 0000000..f2320e8 --- /dev/null +++ b/internal/arcgen/lang/go/dump_source.go @@ -0,0 +1,35 @@ +package arcgengo + +import ( + "bytes" + goast "go/ast" + "go/token" + "io" + + "github.com/kunitsucom/arcgen/internal/logs" +) + +func dumpSource(fset *token.FileSet, arcSrcSet ARCSourceSet) { + for _, arcSrc := range arcSrcSet { + logs.Trace.Print("== Source ================================================================================================================================") + _, _ = io.WriteString(logs.Trace.LineWriter("r.CommentGroup.Text: "), arcSrc.CommentGroup.Text()) + logs.Trace.Print("-- CommentGroup --------------------------------------------------------------------------------------------------------------------------------") + { + commentGroupAST := bytes.NewBuffer(nil) + goast.Fprint(commentGroupAST, fset, arcSrc.CommentGroup, goast.NotNilFilter) + _, _ = 
logs.Trace.LineWriter("").Write(commentGroupAST.Bytes()) + } + logs.Trace.Print("-- TypeSpec --------------------------------------------------------------------------------------------------------------------------------") + { + typeSpecAST := bytes.NewBuffer(nil) + goast.Fprint(typeSpecAST, fset, arcSrc.TypeSpec, goast.NotNilFilter) + _, _ = logs.Trace.LineWriter("").Write(typeSpecAST.Bytes()) + } + logs.Trace.Print("-- StructType --------------------------------------------------------------------------------------------------------------------------------") + { + structTypeAST := bytes.NewBuffer(nil) + goast.Fprint(structTypeAST, fset, arcSrc.StructType, goast.NotNilFilter) + _, _ = logs.Trace.LineWriter("").Write(structTypeAST.Bytes()) + } + } +} diff --git a/internal/arcgen/lang/go/extract_source.go b/internal/arcgen/lang/go/extract_source.go new file mode 100644 index 0000000..9be6434 --- /dev/null +++ b/internal/arcgen/lang/go/extract_source.go @@ -0,0 +1,74 @@ +package arcgengo + +import ( + "context" + goast "go/ast" + "go/token" + "reflect" + "strings" + + errorz "github.com/kunitsucom/util.go/errors" + filepathz "github.com/kunitsucom/util.go/path/filepath" + + "github.com/kunitsucom/arcgen/internal/config" + "github.com/kunitsucom/arcgen/internal/logs" + apperr "github.com/kunitsucom/arcgen/pkg/errors" +) + +//nolint:gocognit,cyclop +func extractSource(_ context.Context, fset *token.FileSet, f *goast.File) (ARCSourceSet, error) { + arcSrcSet := make(ARCSourceSet, 0) + for commentedNode, commentGroups := range goast.NewCommentMap(fset, f, f.Comments) { + for _, commentGroup := range commentGroups { + CommentGroupLoop: + for _, commentLine := range commentGroup.List { + logs.Trace.Printf("commentLine=%s: %s", filepathz.Short(fset.Position(commentGroup.Pos()).String()), commentLine.Text) + // NOTE: If the comment line matches the ColumnTagGo, it is assumed to be a comment line for the struct. + if matches := ColumnTagGoCommentLineRegex().FindStringSubmatch(commentLine.Text); len(matches) > _ColumnTagGoCommentLineRegexContentIndex { + s := &ARCSource{ + Position: fset.Position(commentLine.Pos()), + Package: f.Name, + CommentGroup: commentGroup, + } + goast.Inspect(commentedNode, func(node goast.Node) bool { + switch n := node.(type) { + case *goast.TypeSpec: + s.TypeSpec = n + switch t := n.Type.(type) { + case *goast.StructType: + s.StructType = t + if hasColumnTagGo(t) { + logs.Debug.Printf("🔍: %s: type=%s", fset.Position(t.Pos()).String(), n.Name.Name) + arcSrcSet = append(arcSrcSet, s) + } + return false + default: // noop + } + default: // noop + } + return true + }) + break CommentGroupLoop // NOTE: There may be multiple "ColumnTagGo"s in the same commentGroup, so once you find the first one, break. 
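+ // For illustration (assuming the default column tag "db"; the struct, field, and table names are hypothetical):
+ //
+ //	// db: table: users
+ //	type User struct {
+ //		ID   int64  `db:"id"`
+ //		Name string `db:"name"`
+ //	}
+ //
+ // The annotation comment line matches ColumnTagGoCommentLineRegex above, and hasColumnTagGo reports true
+ // because the fields carry `db:"..."` tags, so the struct is collected into arcSrcSet.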
+ } + } + } + } + + if len(arcSrcSet) == 0 { + return nil, errorz.Errorf("column-tag-go=%s: %w", config.ColumnTagGo(), apperr.ErrColumnTagGoAnnotationNotFoundInSource) + } + + return arcSrcSet, nil +} + +func hasColumnTagGo(s *goast.StructType) bool { + for _, field := range s.Fields.List { + if field.Tag != nil { + tag := reflect.StructTag(strings.Trim(field.Tag.Value, "`")) + if columnName := tag.Get(config.ColumnTagGo()); columnName != "" { + return true + } + } + } + return false +} diff --git a/internal/arcgen/lang/go/generate.go b/internal/arcgen/lang/go/generate.go new file mode 100644 index 0000000..98bcb60 --- /dev/null +++ b/internal/arcgen/lang/go/generate.go @@ -0,0 +1,267 @@ +package arcgengo + +import ( + "bytes" + "context" + "fmt" + "go/ast" + "go/format" + "go/token" + "os" + "reflect" + "strconv" + "strings" + + errorz "github.com/kunitsucom/util.go/errors" + filepathz "github.com/kunitsucom/util.go/path/filepath" + + "github.com/kunitsucom/arcgen/internal/arcgen/lang/util" + "github.com/kunitsucom/arcgen/internal/config" +) + +//nolint:cyclop +func Generate(ctx context.Context, src string) error { + arcSrcSets, err := parse(ctx, src) + if err != nil { + return errorz.Errorf("parse: %w", err) + } + + newFile := token.NewFileSet() + + for _, arcSrcSet := range arcSrcSets { + for _, arcSrc := range arcSrcSet { + filePrefix := strings.TrimSuffix(arcSrc.Position.Filename, fileSuffix) + filename := fmt.Sprintf("%s.%s.gen%s", filePrefix, config.ColumnTagGo(), fileSuffix) + f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o644) + if err != nil { + return errorz.Errorf("os.Open: %w", err) + } + + packageName := arcSrc.Package.Name + structName := arcSrc.TypeSpec.Name.Name + tableName := extractTableNameFromCommentGroup(arcSrc.CommentGroup) + columnNames := func() []string { + columnNames := make([]string, 0) + for _, field := range arcSrc.StructType.Fields.List { + if field.Tag != nil { + tag := reflect.StructTag(strings.Trim(field.Tag.Value, "`")) + switch columnName := tag.Get(config.ColumnTagGo()); columnName { + case "", "-": + // noop + default: + columnNames = append(columnNames, columnName) + } + } + } + return columnNames + }() + + node := generateASTFile(packageName, structName, tableName, config.MethodPrefixGlobal(), config.MethodPrefixColumn(), columnNames) + + buf := bytes.NewBuffer(nil) + if err := format.Node(buf, newFile, node); err != nil { + return errorz.Errorf("format.Node: %w", err) + } + + // add header comment + s := strings.Replace( + buf.String(), + "package "+packageName+"\n", + fmt.Sprintf("// Code generated by arcgen. 
DO NOT EDIT.\n//\n// source: %s:%d\n\npackage "+packageName+"\n", filepathz.Short(arcSrc.Position.Filename), arcSrc.Position.Line), + 1, + ) + // add blank line between methods + s = strings.ReplaceAll(s, "\n}\nfunc ", "\n}\n\nfunc ") + + // write to file + if _, err := f.WriteString(s); err != nil { + return errorz.Errorf("f.WriteString: %w", err) + } + } + } + + return nil +} + +func extractTableNameFromCommentGroup(commentGroup *ast.CommentGroup) string { + for _, comment := range commentGroup.List { + if matches := util.RegexIndexTableName.Regex.FindStringSubmatch(comment.Text); len(matches) > util.RegexIndexTableName.Index { + return matches[util.RegexIndexTableName.Index] + } + } + return fmt.Sprintf("ERROR: TABLE NAME IN COMMENT `// \"%s\": table: *` NOT FOUND: comment=%q", config.ColumnTagGo(), commentGroup.Text()) +} + +//nolint:funlen +func generateASTFile(packageName string, structName string, tableName string, prefixGlobal string, prefixColumn string, columnNames []string) *ast.File { + file := &ast.File{ + // package + Name: &ast.Ident{ + Name: packageName, + }, + // methods + Decls: []ast.Decl{ + &ast.FuncDecl{ + Recv: &ast.FieldList{ + List: []*ast.Field{ + { + Names: []*ast.Ident{ + { + Name: "s", + }, + }, + Type: &ast.StarExpr{ + X: &ast.Ident{ + Name: structName, // MEMO: struct name + }, + }, + }, + }, + }, + Name: &ast.Ident{ + Name: prefixGlobal + "TableName", + }, + Type: &ast.FuncType{ + Params: &ast.FieldList{}, + Results: &ast.FieldList{ + List: []*ast.Field{ + { + Type: &ast.Ident{ + Name: "string", + }, + }, + }, + }, + }, + Body: &ast.BlockStmt{ + List: []ast.Stmt{ + &ast.ReturnStmt{ + Results: []ast.Expr{ + &ast.Ident{ + Name: strconv.Quote(tableName), + }, + }, + }, + }, + }, + }, + }, + } + + file.Decls = append(file.Decls, generateASTColumnMethods(structName, prefixGlobal, prefixColumn, columnNames)...) 
+ + return file +} + +//nolint:funlen +func generateASTColumnMethods(structName string, prefixGlobal string, prefixColumn string, columnNames []string) []ast.Decl { + decls := make([]ast.Decl, 0) + + // all column names method + elts := make([]ast.Expr, 0) + for _, columnName := range columnNames { + elts = append(elts, &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(columnName), + }) + } + decls = append(decls, &ast.FuncDecl{ + Recv: &ast.FieldList{ + List: []*ast.Field{ + { + Names: []*ast.Ident{ + { + Name: "s", + }, + }, + Type: &ast.StarExpr{ + X: &ast.Ident{ + Name: structName, // MEMO: struct name + }, + }, + }, + }, + }, + Name: &ast.Ident{ + Name: prefixGlobal + "ColumnNames", + }, + Type: &ast.FuncType{ + Params: &ast.FieldList{}, + Results: &ast.FieldList{ + List: []*ast.Field{ + { + Type: &ast.Ident{ + Name: "[]string", + }, + }, + }, + }, + }, + Body: &ast.BlockStmt{ + List: []ast.Stmt{ + &ast.ReturnStmt{ + Results: []ast.Expr{ + &ast.CompositeLit{ + Type: &ast.ArrayType{ + Elt: &ast.Ident{ + Name: "string", + }, + }, + Elts: elts, + }, + }, + }, + }, + }, + }) + + // each column name methods + for _, columnName := range columnNames { + decls = append(decls, &ast.FuncDecl{ + Recv: &ast.FieldList{ + List: []*ast.Field{ + { + Names: []*ast.Ident{ + { + Name: "s", + }, + }, + Type: &ast.StarExpr{ + X: &ast.Ident{ + Name: structName, // MEMO: struct name + }, + }, + }, + }, + }, + Name: &ast.Ident{ + Name: prefixGlobal + prefixColumn + columnName, + }, + Type: &ast.FuncType{ + Params: &ast.FieldList{}, + Results: &ast.FieldList{ + List: []*ast.Field{ + { + Type: &ast.Ident{ + Name: "string", + }, + }, + }, + }, + }, + Body: &ast.BlockStmt{ + List: []ast.Stmt{ + &ast.ReturnStmt{ + Results: []ast.Expr{ + &ast.Ident{ + Name: strconv.Quote(columnName), + }, + }, + }, + }, + }, + }) + } + + return decls +} diff --git a/internal/arcgen/lang/go/parse.go b/internal/arcgen/lang/go/parse.go new file mode 100644 index 0000000..74dc158 --- /dev/null +++ b/internal/arcgen/lang/go/parse.go @@ -0,0 +1,90 @@ +package arcgengo + +import ( + "context" + "errors" + "go/parser" + "go/token" + "os" + "path/filepath" + "strings" + + errorz "github.com/kunitsucom/util.go/errors" + + "github.com/kunitsucom/arcgen/internal/logs" + "github.com/kunitsucom/arcgen/internal/util" + apperr "github.com/kunitsucom/arcgen/pkg/errors" +) + +func parse(ctx context.Context, src string) (ARCSourceSets, error) { + // MEMO: get absolute path for parser.ParseFile() + sourceAbs := util.Abs(src) + + info, err := os.Stat(sourceAbs) + if err != nil { + return nil, errorz.Errorf("os.Stat: %w", err) + } + + arcSrcSets := make(ARCSourceSets, 0) + + if info.IsDir() { + if err := filepath.WalkDir(sourceAbs, walkDirFn(ctx, &arcSrcSets)); err != nil { + return nil, errorz.Errorf("filepath.WalkDir: %w", err) + } + + return arcSrcSets, nil + } + + arcSrcSet, err := parseFile(ctx, sourceAbs) + if err != nil { + return nil, errorz.Errorf("parseFile: file=%s: %v", sourceAbs, err) + } + + arcSrcSets = append(arcSrcSets, arcSrcSet) + return arcSrcSets, nil +} + +//nolint:gochecknoglobals +var fileSuffix = ".go" + +func walkDirFn(ctx context.Context, arcSrcSets *ARCSourceSets) func(path string, d os.DirEntry, err error) error { + return func(path string, d os.DirEntry, err error) error { + if err != nil { + return err //nolint:wrapcheck + } + + if d.IsDir() || !strings.HasSuffix(path, fileSuffix) || strings.HasSuffix(path, "_test.go") { + return nil + } + + arcSrcSet, err := parseFile(ctx, path) + if err != nil { + if 
errors.Is(err, apperr.ErrColumnTagGoAnnotationNotFoundInSource) { + logs.Debug.Printf("SKIP: parseFile: file=%s: %v", path, err) + return nil + } + return errorz.Errorf("parseFile: file=%s: %v", path, err) + } + + *arcSrcSets = append(*arcSrcSets, arcSrcSet) + + return nil + } +} + +func parseFile(ctx context.Context, filename string) (ARCSourceSet, error) { + fset := token.NewFileSet() + rootNode, err := parser.ParseFile(fset, filename, nil, parser.ParseComments) + if err != nil { + return nil, errorz.Errorf("parser.ParseFile: %w", err) + } + + arcSrcSet, err := extractSource(ctx, fset, rootNode) + if err != nil { + return nil, errorz.Errorf("extractSource: %w", err) + } + + dumpSource(fset, arcSrcSet) + + return arcSrcSet, nil +} diff --git a/internal/arcgen/lang/go/source.go b/internal/arcgen/lang/go/source.go new file mode 100644 index 0000000..194d6c7 --- /dev/null +++ b/internal/arcgen/lang/go/source.go @@ -0,0 +1,46 @@ +package arcgengo + +import ( + "fmt" + "go/ast" + "go/token" + "regexp" + "sync" + + "github.com/kunitsucom/arcgen/internal/config" +) + +type ( + ARCSource struct { + Position token.Position + Package *ast.Ident + // TypeSpec is used to guess the table name if the CREATE TABLE annotation is not found. + TypeSpec *ast.TypeSpec + // StructType is used to determine the column name. If the tag specified by --column-tag-go is not found, the field name is used. + StructType *ast.StructType + CommentGroup *ast.CommentGroup + } + ARCSourceSet []*ARCSource + ARCSourceSets []ARCSourceSet +) + +//nolint:gochecknoglobals +var ( + _ColumnTagGoCommentLineRegex *regexp.Regexp + _ColumnTagGoCommentLineRegexOnce sync.Once +) + +const ( + // _____________ <- 1. comment prefix + // __ <- 2. tag name + // ___ <- 4. comment suffix + _ColumnTagGoCommentLineRegexFormat = `^\s*(//+\s*|/\*\s*)?(%s)\s*:\s*(.*)\s*(\*/)?` + _ColumnTagGoCommentLineRegexContentIndex = /* ^^ 3. 
tag value */ 3 +) + +func ColumnTagGoCommentLineRegex() *regexp.Regexp { + _ColumnTagGoCommentLineRegexOnce.Do(func() { + _ColumnTagGoCommentLineRegex = regexp.MustCompile(fmt.Sprintf(_ColumnTagGoCommentLineRegexFormat, config.ColumnTagGo())) + }) + return _ColumnTagGoCommentLineRegex +} diff --git a/internal/arcgen/lang/util/regex.go b/internal/arcgen/lang/util/regex.go new file mode 100644 index 0000000..0c32345 --- /dev/null +++ b/internal/arcgen/lang/util/regex.go @@ -0,0 +1,16 @@ +package util + +import "regexp" + +type RegexIndex struct { + Regex *regexp.Regexp + Index int +} + +//nolint:gochecknoglobals +var ( + RegexIndexTableName = RegexIndex{ + Regex: regexp.MustCompile(`^\s*(//+\s*|/\*\s*)?\S+\s*:\s*table(s)?\s*:\s*(\S+.*)$`), + Index: 3, + } +) diff --git a/internal/config/column_tag_go.go b/internal/config/column_tag_go.go new file mode 100644 index 0000000..8182379 --- /dev/null +++ b/internal/config/column_tag_go.go @@ -0,0 +1,18 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadColumnTagGo(_ context.Context, cmd *cliz.Command) string { + v, _ := cmd.GetOptionString(_OptionColumnTagGo) + return v +} + +func ColumnTagGo() string { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.ColumnTagGo +} diff --git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 0000000..c08a135 --- /dev/null +++ b/internal/config/config.go @@ -0,0 +1,204 @@ +package config + +import ( + "context" + "encoding/json" + "sync" + "time" + + errorz "github.com/kunitsucom/util.go/errors" + cliz "github.com/kunitsucom/util.go/exp/cli" + + "github.com/kunitsucom/arcgen/internal/contexts" + "github.com/kunitsucom/arcgen/internal/logs" +) + +// Use a structure so that settings can be backed up. 
+// +//nolint:tagliatelle +type config struct { + Version bool `json:"version"` + Trace bool `json:"trace"` + Debug bool `json:"debug"` + Timestamp string `json:"timestamp"` + Language string `json:"language"` + Source string `json:"source"` + // Golang + ColumnTagGo string `json:"column_tag_go"` + MethodPrefixColumn string `json:"column_method_prefix"` + MethodPrefixGlobal string `json:"global_method_prefix"` +} + +//nolint:gochecknoglobals +var ( + globalConfig *config + globalConfigMu sync.RWMutex +) + +func MustLoad(ctx context.Context) (rollback func()) { + rollback, err := Load(ctx) + if err != nil { + err = errorz.Errorf("Load: %w", err) + panic(err) + } + return rollback +} + +func Load(ctx context.Context) (rollback func(), err error) { + globalConfigMu.Lock() + defer globalConfigMu.Unlock() + backup := globalConfig + + cfg, err := load(ctx) + if err != nil { + return nil, errorz.Errorf("load: %w", err) + } + + globalConfig = cfg + + rollback = func() { + globalConfigMu.Lock() + defer globalConfigMu.Unlock() + globalConfig = backup + } + + return rollback, nil +} + +const ( + _OptionVersion = "version" + + _OptionTrace = "trace" + _EnvKeyTrace = "ARCGEN_TRACE" + + _OptionDebug = "debug" + _EnvKeyDebug = "ARCGEN_DEBUG" + + _OptionTimestamp = "timestamp" + _EnvKeyTimestamp = "ARCGEN_TIMESTAMP" + + _OptionLanguage = "lang" + _EnvKeyLanguage = "ARCGEN_LANGUAGE" + + _OptionSource = "src" + _EnvKeySource = "ARCGEN_SOURCE" + + _OptionDestination = "dst" + _EnvKeyDestination = "ARCGEN_DESTINATION" + + // Golang + + _OptionColumnTagGo = "column-tag-go" + _EnvKeyColumnTagGo = "ARCGEN_COLUMN_TAG_GO" + + _OptionMethodPrefixGlobal = "method-prefix-global" + _EnvKeyMethodPrefixGlobal = "ARCGEN_METHOD_PREFIX_GLOBAL" + + _OptionMethodPrefixColumn = "method-prefix-column" + _EnvKeyMethodPrefixColumn = "ARCGEN_METHOD_PREFIX_COLUMN" +) + +// MEMO: Since there is a possibility of returning some kind of error in the future, the signature is made to return an error. 
+// +//nolint:funlen +func load(ctx context.Context) (cfg *config, err error) { //nolint:unparam + cmd := &cliz.Command{ + Name: "ARCgen", + Description: "Generate DDL from annotated source code.", + Options: []cliz.Option{ + &cliz.BoolOption{ + Name: _OptionVersion, + Description: "show version information and exit", + Default: cliz.Default(false), + }, + &cliz.BoolOption{ + Name: _OptionTrace, + Environment: _EnvKeyTrace, + Description: "trace mode enabled", + Default: cliz.Default(false), + }, + &cliz.BoolOption{ + Name: _OptionDebug, + Environment: _EnvKeyDebug, + Description: "debug mode", + Default: cliz.Default(false), + }, + &cliz.StringOption{ + Name: _OptionTimestamp, + Environment: _EnvKeyTimestamp, + Description: "code generation timestamp", + Default: cliz.Default(time.Now().Format(time.RFC3339)), + }, + &cliz.StringOption{ + Name: _OptionLanguage, + Environment: _EnvKeyLanguage, + Description: "programming language to generate DDL", + Default: cliz.Default("go"), + }, + &cliz.StringOption{ + Name: _OptionSource, + Environment: _EnvKeySource, + Description: "source file or directory", + Default: cliz.Default("/dev/stdin"), + }, + &cliz.StringOption{ + Name: _OptionDestination, + Environment: _EnvKeyDestination, + Description: "destination file or directory", + Default: cliz.Default("/dev/stdout"), + }, + // Golang + &cliz.StringOption{ + Name: _OptionColumnTagGo, + Environment: _EnvKeyColumnTagGo, + Description: "column annotation key for Go struct tag", + Default: cliz.Default("db"), + }, + &cliz.StringOption{ + Name: _OptionMethodPrefixGlobal, + Environment: _EnvKeyMethodPrefixGlobal, + Description: "global method prefix", + Default: cliz.Default("Get"), + }, + &cliz.StringOption{ + Name: _OptionMethodPrefixColumn, + Environment: _EnvKeyMethodPrefixColumn, + Description: "column method prefix", + Default: cliz.Default("ColumnName_"), + }, + }, + } + + if _, err := cmd.Parse(contexts.Args(ctx)); err != nil { + return nil, errorz.Errorf("cmd.Parse: %w", err) + } + + c := &config{ + Version: loadVersion(ctx, cmd), + Trace: loadTrace(ctx, cmd), + Debug: loadDebug(ctx, cmd), + Timestamp: loadTimestamp(ctx, cmd), + Language: loadLanguage(ctx, cmd), + Source: loadSource(ctx, cmd), + // Golang + ColumnTagGo: loadColumnTagGo(ctx, cmd), + MethodPrefixGlobal: loadMethodPrefixGlobal(ctx, cmd), + MethodPrefixColumn: loadMethodPrefixColumn(ctx, cmd), + } + + if c.Debug { + logs.Debug = logs.NewDebug() + logs.Trace.Print("debug mode enabled") + } + if c.Trace { + logs.Trace = logs.NewTrace() + logs.Debug = logs.NewDebug() + logs.Debug.Print("trace mode enabled") + } + + if err := json.NewEncoder(logs.Debug).Encode(c); err != nil { + logs.Debug.Printf("config: %#v", c) + } + + return c, nil +} diff --git a/internal/config/debug.go b/internal/config/debug.go new file mode 100644 index 0000000..e7c275d --- /dev/null +++ b/internal/config/debug.go @@ -0,0 +1,18 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadDebug(_ context.Context, cmd *cliz.Command) bool { + v, _ := cmd.GetOptionBool(_OptionDebug) + return v +} + +func Debug() bool { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.Debug +} diff --git a/internal/config/language.go b/internal/config/language.go new file mode 100644 index 0000000..4c63186 --- /dev/null +++ b/internal/config/language.go @@ -0,0 +1,18 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadLanguage(_ context.Context, cmd 
*cliz.Command) string { + v, _ := cmd.GetOptionString(_OptionLanguage) + return v +} + +func Language() string { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.Language +} diff --git a/internal/config/method_prefix_column.go b/internal/config/method_prefix_column.go new file mode 100644 index 0000000..bd10a02 --- /dev/null +++ b/internal/config/method_prefix_column.go @@ -0,0 +1,18 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadMethodPrefixColumn(_ context.Context, cmd *cliz.Command) string { + v, _ := cmd.GetOptionString(_OptionMethodPrefixColumn) + return v +} + +func MethodPrefixColumn() string { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.MethodPrefixColumn +} diff --git a/internal/config/method_prefix_global.go b/internal/config/method_prefix_global.go new file mode 100644 index 0000000..53d25a3 --- /dev/null +++ b/internal/config/method_prefix_global.go @@ -0,0 +1,18 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadMethodPrefixGlobal(_ context.Context, cmd *cliz.Command) string { + v, _ := cmd.GetOptionString(_OptionMethodPrefixGlobal) + return v +} + +func MethodPrefixGlobal() string { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.MethodPrefixGlobal +} diff --git a/internal/config/source.go b/internal/config/source.go new file mode 100644 index 0000000..96cf9e1 --- /dev/null +++ b/internal/config/source.go @@ -0,0 +1,18 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadSource(_ context.Context, cmd *cliz.Command) string { + v, _ := cmd.GetOptionString(_OptionSource) + return v +} + +func Source() string { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.Source +} diff --git a/internal/config/timestamp.go b/internal/config/timestamp.go new file mode 100644 index 0000000..d07a6d1 --- /dev/null +++ b/internal/config/timestamp.go @@ -0,0 +1,18 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadTimestamp(_ context.Context, cmd *cliz.Command) string { + v, _ := cmd.GetOptionString(_OptionTimestamp) + return v +} + +func Timestamp() string { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.Timestamp +} diff --git a/internal/config/trace.go b/internal/config/trace.go new file mode 100644 index 0000000..e4c3e79 --- /dev/null +++ b/internal/config/trace.go @@ -0,0 +1,18 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadTrace(_ context.Context, cmd *cliz.Command) bool { + v, _ := cmd.GetOptionBool(_OptionTrace) + return v +} + +func Trace() bool { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.Trace +} diff --git a/internal/config/version.go b/internal/config/version.go new file mode 100644 index 0000000..f40b43a --- /dev/null +++ b/internal/config/version.go @@ -0,0 +1,31 @@ +package config + +import ( + "context" + + cliz "github.com/kunitsucom/util.go/exp/cli" +) + +func loadVersion(_ context.Context, cmd *cliz.Command) bool { + v, _ := cmd.GetOptionBool(_OptionVersion) + return v +} + +func Version() bool { + globalConfigMu.RLock() + defer globalConfigMu.RUnlock() + return globalConfig.Version +} + +// nolint: deadcode,gochecknoglobals,unused,varcheck +var ( + version string + revision string + branch string + 
timestamp string +) + +func BuildVersion() string { return version } +func BuildRevision() string { return revision } +func BuildBranch() string { return branch } +func BuildTimestamp() string { return timestamp } diff --git a/internal/contexts/args.go b/internal/contexts/args.go new file mode 100644 index 0000000..261fdd0 --- /dev/null +++ b/internal/contexts/args.go @@ -0,0 +1,20 @@ +package contexts + +import ( + "context" + "os" +) + +type contextKeyArgs struct{} + +func Args(ctx context.Context) []string { + if v, ok := ctx.Value(contextKeyArgs{}).([]string); ok { + return v + } + + return os.Args[0:] +} + +func WithArgs(ctx context.Context, now []string) context.Context { + return context.WithValue(ctx, contextKeyArgs{}, now) +} diff --git a/internal/contexts/now.go b/internal/contexts/now.go new file mode 100644 index 0000000..0788322 --- /dev/null +++ b/internal/contexts/now.go @@ -0,0 +1,29 @@ +package contexts + +import ( + "context" + "time" +) + +type contextKeyNow struct{} + +func Now(ctx context.Context) time.Time { + if now, ok := ctx.Value(contextKeyNow{}).(time.Time); ok { + return now + } + + return time.Now() +} + +func WithNow(ctx context.Context, now time.Time) context.Context { + return context.WithValue(ctx, contextKeyNow{}, now) +} + +func WithNowString(ctx context.Context, layout string, value string) context.Context { + v, err := time.Parse(layout, value) + if err != nil { + v = time.Now() + } + + return WithNow(ctx, v) +} diff --git a/internal/logs/logger.go b/internal/logs/logger.go new file mode 100644 index 0000000..b6e27e4 --- /dev/null +++ b/internal/logs/logger.go @@ -0,0 +1,63 @@ +package logs + +import ( + "bytes" + "fmt" + "io" + "log" + "os" + + ioz "github.com/kunitsucom/util.go/io" +) + +//nolint:gochecknoglobals +var ( + Trace Logger = NewDiscard() //nolint:revive + Debug Logger = NewDiscard() //nolint:revive + Info Logger = logger{log.New(os.Stderr, "INFO: ", log.Ldate|log.Ltime|log.Lshortfile)} + Warn Logger = logger{log.New(os.Stderr, "WARN: ", log.Ldate|log.Ltime|log.Lshortfile)} +) + +func NewDiscard() Logger { //nolint:ireturn + return logger{log.New(io.Discard, "", 0)} +} + +func NewTrace() Logger { //nolint:ireturn + return logger{log.New(os.Stderr, "TRACE: ", log.Ldate|log.Ltime|log.Lshortfile)} +} + +func NewDebug() Logger { //nolint:ireturn + return logger{log.New(os.Stderr, "DEBUG: ", log.Ldate|log.Ltime|log.Lshortfile)} +} + +type Logger interface { + io.Writer + Print(v ...interface{}) + Printf(format string, v ...interface{}) + LineWriter(prefix string) io.Writer +} + +const callerSkip = 2 + +type logger struct{ l *log.Logger } + +func (l logger) Print(v ...interface{}) { _ = l.l.Output(callerSkip, fmt.Sprint(v...)) } +func (l logger) Printf(format string, v ...interface{}) { + _ = l.l.Output(callerSkip, fmt.Sprintf(format, v...)) +} + +func (l logger) Write(p []byte) (n int, err error) { + l.Print(string(p)) + return len(p), nil +} + +func (l logger) LineWriter(prefix string) io.Writer { + return ioz.WriteFunc(func(p []byte) (n int, err error) { + lines := bytes.Split(p, []byte("\n")) + for _, line := range lines { + _ = l.l.Output(1, prefix+string(line)) + } + + return len(p), nil + }) +} diff --git a/internal/util/abs.go b/internal/util/abs.go new file mode 100644 index 0000000..2325e11 --- /dev/null +++ b/internal/util/abs.go @@ -0,0 +1,16 @@ +package util + +import ( + "path/filepath" + + "github.com/kunitsucom/arcgen/internal/logs" +) + +func Abs(path string) string { + abs, err := filepath.Abs(path) + if err != nil { + 
logs.Warn.Printf("failed to get absolute path, using path as-is: path=%s: %v", path, err) + return path + } + return abs +} diff --git a/pkg/arcgen/arcgen.go b/pkg/arcgen/arcgen.go new file mode 100644 index 0000000..7e5a443 --- /dev/null +++ b/pkg/arcgen/arcgen.go @@ -0,0 +1,56 @@ +package arcgen + +import ( + "context" + "errors" + "fmt" + "time" + + errorz "github.com/kunitsucom/util.go/errors" + cliz "github.com/kunitsucom/util.go/exp/cli" + + arcgengo "github.com/kunitsucom/arcgen/internal/arcgen/lang/go" + "github.com/kunitsucom/arcgen/internal/config" + "github.com/kunitsucom/arcgen/internal/contexts" + "github.com/kunitsucom/arcgen/internal/logs" +) + +func ARCGen(ctx context.Context) error { + if _, err := config.Load(ctx); err != nil { + if errors.Is(err, cliz.ErrHelp) { + return nil + } + return fmt.Errorf("config.Load: %w", err) + } + + if config.Version() { + fmt.Printf("version: %s\n", config.BuildVersion()) //nolint:forbidigo + fmt.Printf("revision: %s\n", config.BuildRevision()) //nolint:forbidigo + fmt.Printf("build branch: %s\n", config.BuildBranch()) //nolint:forbidigo + fmt.Printf("build timestamp: %s\n", config.BuildTimestamp()) //nolint:forbidigo + return nil + } + + ctx = contexts.WithNowString(ctx, time.RFC3339, config.Timestamp()) + + src := config.Source() + logs.Info.Printf("source: %s", src) + + if err := generate(ctx, src); err != nil { + return errorz.Errorf("generate: %w", err) + } + + return nil +} + +func generate(ctx context.Context, src string) error { + switch language := config.Language(); language { + case "go": + if err := arcgengo.Generate(ctx, src); err != nil { + return errorz.Errorf("arcgengo.Generate: %w", err) + } + return nil + default: + return errorz.Errorf("unknown language: %s", language) + } +} diff --git a/pkg/errors/errors.go b/pkg/errors/errors.go new file mode 100644 index 0000000..c3489c4 --- /dev/null +++ b/pkg/errors/errors.go @@ -0,0 +1,10 @@ +package errors + +import "errors" + +var ( + ErrUnknownError = errors.New("unknown error") + ErrNotSupported = errors.New("not supported") + ErrUnformattedFileIsNotSupported = errors.New("unformatted file is not supported") + ErrColumnTagGoAnnotationNotFoundInSource = errors.New("column-tag-go annotation not found in source") +)
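As a rough end-to-end sketch of what the generator above produces (all package, file, struct, table, and column names are hypothetical, and the defaults are assumed: column tag "db", global method prefix "Get", column method prefix "ColumnName_"), a source file user.go such as:

package examplepkg

// db: table: users
type User struct {
	ID   int64  `db:"id"`
	Name string `db:"name"`
}

would lead arcgen to write a sibling file user.db.gen.go along these lines:

// Code generated by arcgen. DO NOT EDIT.
//
// source: user.go:3

package examplepkg

func (s *User) GetTableName() string {
	return "users"
}

func (s *User) GetColumnNames() []string {
	return []string{"id", "name"}
}

func (s *User) GetColumnName_id() string {
	return "id"
}

func (s *User) GetColumnName_name() string {
	return "name"
}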