new script: profilarr

This commit is contained in:
bilulib
2025-07-21 13:38:47 +02:00
parent 8ebf25361b
commit 45c23e5b8e
41 changed files with 209 additions and 7520 deletions

2
.gitignore vendored
View File

@@ -1,3 +1,5 @@
Huntarr.io-6.3.6/
huly-selfhost-main/
profilarr/

7
ct/headers/profilarr Normal file
View File

@@ -0,0 +1,7 @@
                     _____ __
    ____  _________  / __(_) /___ ___________
   / __ \/ ___/ __ \/ /_/ / / __ `/ ___/ ___/
  / /_/ / /  / /_/ / __/ / / /_/ / /  / /
 / .___/_/   \____/_/ /_/_/\__,_/_/  /_/
/_/

View File

@@ -1,331 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC1091,SC2034 # SC1091: non-constant source, SC2034: unused variables
source <(curl -fsSL https://git.bila.li/Proxmox/proxmox-ve-install-scripts/raw/branch/main/misc/build.func) # Updated to main branch
# Copyright (c) 2021-2025 community-scripts ORG
# Author: GitHub Copilot
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/hcengineering/huly-selfhost
# App Default Values
APP="Huly"
var_tags="app;productivity;collaboration" # Used by build.func
var_cpu="2" # Minimum recommended, Huly can be resource intensive. Used by build.func
var_ram="4096" # Minimum recommended, especially with Rekoni. Used by build.func
var_disk="20" # Base disk, consider more for MinIO data and MongoDB. Used by build.func
var_os="debian" # Used by build.func
var_version="12" # Debian Bookworm. Used by build.func
var_unprivileged="1" # Run as unprivileged container. Used by build.func
header_info "$APP"
variables # This function is from build.func, sets up CTID, IP, etc.
color
catch_errors
# Paths and Configs
HULY_CONFIG_DIR="/opt/huly-selfhost"
HULY_INSTALL_DIR="/opt/huly"
HULY_CONFIG_FILE="$HULY_CONFIG_DIR/native.conf"
HULY_CREDS_FILE="/root/huly.creds" # Storing in /root for CT context
HULY_VERSION_FILE="$HULY_CONFIG_DIR/version.txt"
MINIO_DATA_DIR="/opt/minio/data"
_MONGODB_DATA_DIR="/var/lib/mongodb" # Default MongoDB data directory, prefixed to avoid SC2034 if not directly used in this script
# Services to manage
HULY_SERVICES=("huly-front" "huly-account" "huly-transactor" "huly-collaborator" "huly-rekoni")
DEPENDENCY_SERVICES=("minio" "mongod" "nginx")
ALL_SERVICES=("${HULY_SERVICES[@]}" "${DEPENDENCY_SERVICES[@]}")
# Function to stop all Huly and dependency services
stop_all_services() {
msg_info "Stopping all Huly related services..."
for service in "${ALL_SERVICES[@]}"; do
systemctl stop "$service" 2>/dev/null || true # Suppress error if service not found/running
done
msg_ok "All Huly related services stopped."
}
# Function to start all Huly and dependency services in order
start_all_services() {
msg_info "Starting all Huly related services..."
systemctl start mongod minio nginx # Start core dependencies
sleep 3 # Give them a moment
systemctl start huly-account huly-collaborator # Account & Collaborator need Mongo
sleep 2
systemctl start huly-transactor huly-rekoni # These need Account, Mongo, Minio
sleep 2
systemctl start huly-front # Front needs all backends
msg_ok "All Huly related services started."
}
function update_script() {
header_info
# These checks are typically for the CT environment, called by build.func
# Source build.func to make them available if needed directly, though usually not.
# . <(curl -fsSL https://git.bila.li/Proxmox/proxmox-ve-install-scripts/raw/branch/main/misc/build.func)
# check_container_storage
# check_container_resources
if [[ ! -d "$HULY_INSTALL_DIR" ]] || [[ ! -f "$HULY_CONFIG_FILE" ]]; then
msg_error "No ${APP} Installation Found (missing $HULY_INSTALL_DIR or $HULY_CONFIG_FILE)!"
exit 1
fi
msg_info "Updating $APP"
CURRENT_DATE=$(date '+%Y-%m-%d %H:%M:%S')
stop_all_services
msg_info "Installing Docker temporarily for component updates"
$STD apt-get update
# Using docker.io for simplicity in CT update script, as it's often readily available
# Ensure this doesn't conflict with docker-ce if that was used for install and not fully purged
$STD apt-get install -y docker.io curl
$STD systemctl start docker
msg_ok "Docker installed temporarily."
# Function to extract from container (similar to install script)
extract_component_update() {
local component_name="$1"
local image_name="hardcoreeng/$component_name:latest"
local target_dir="$HULY_INSTALL_DIR/$component_name"
local container_name="huly-${component_name}-update-extract"
msg_info "Pulling latest Docker image for $component_name: $image_name"
if ! $STD docker pull "$image_name"; then
msg_error "Failed to pull Docker image $image_name for $component_name."
return 1 # Continue with other components if one fails?
fi
msg_info "Extracting $component_name from $image_name to $target_dir"
if ! $STD docker create --name "$container_name" "$image_name"; then
msg_error "Failed to create Docker container for $component_name."
return 1
fi
# Clear old component files before extracting new ones
rm -rf "${target_dir:?}/"* # Safety: :? ensures var is set
mkdir -p "$target_dir"
local extracted=false
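# Probe a few common app locations inside the image and copy the first one that exists.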
for path_in_container in "/app/dist" "/app" "/usr/src/app/dist" "/usr/src/app" "/dist" "/opt/app" "/home/app"; do
if $STD docker exec "$container_name" ls "${path_in_container}/." >/dev/null 2>&1; then
if $STD docker cp "${container_name}:${path_in_container}/." "$target_dir/"; then
extracted=true
msg_ok "Extracted updated $component_name from ${path_in_container}"
break
fi
fi
done
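# $extracted holds the literal string true/false, so expanding it runs the matching shell builtin and tests the flag directly.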
if ! $extracted; then
msg_warn "Could not find standard app directory in $component_name container. Copying entire root."
if ! $STD docker cp "${container_name}:/." "$target_dir/"; then
msg_error "Failed to copy any files for updated $component_name."
$STD docker rm "$container_name"
return 1
fi
fi
$STD docker rm "$container_name"
msg_ok "Extraction update completed for $component_name"
}
# Update each Huly component
for component in "${HULY_SERVICES[@]//huly-/}"; do # Removes "huly-" prefix for image name
extract_component_update "$component"
done
msg_info "Removing Docker"
$STD systemctl stop docker
$STD apt-get remove -y docker.io docker-ce docker-ce-cli containerd.io --allow-remove-essential
$STD apt-get purge -y docker.io docker-ce docker-ce-cli containerd.io --allow-remove-essential
$STD apt-get autoremove -y
$STD rm -rf /var/lib/docker /var/lib/containerd
msg_ok "Docker removed."
# Update version information
echo "$CURRENT_DATE - Updated Huly Components" >>"$HULY_VERSION_FILE"
start_all_services
msg_ok "Updated $APP successfully. All services restarted."
}
function backup_script() {
header_info
# check_container_storage (if relevant for backup space)
if [[ ! -d "$HULY_CONFIG_DIR" ]]; then
msg_error "No ${APP} Installation Found (missing $HULY_CONFIG_DIR)!"
exit 1
fi
BACKUP_DATE=$(date +%Y-%m-%d_%H%M%S)
BACKUP_FILENAME_BASE="huly-native-backup-$BACKUP_DATE"
BACKUP_TEMP_DIR="/tmp/$BACKUP_FILENAME_BASE"
ARCHIVE_DESTINATION="/root" # Standard backup location in these scripts
FINAL_ARCHIVE_PATH="$ARCHIVE_DESTINATION/$BACKUP_FILENAME_BASE.tar.gz"
mkdir -p "$BACKUP_TEMP_DIR/config"
mkdir -p "$BACKUP_TEMP_DIR/mongodb"
mkdir -p "$BACKUP_TEMP_DIR/minio_data"
msg_info "Backing up Huly configuration..."
cp "$HULY_CONFIG_FILE" "$BACKUP_TEMP_DIR/config/"
if [ -f "$HULY_CREDS_FILE" ]; then
cp "$HULY_CREDS_FILE" "$BACKUP_TEMP_DIR/config/"
else
msg_warn "Credentials file $HULY_CREDS_FILE not found. Skipping."
fi
if [ -f "$HULY_VERSION_FILE" ]; then
cp "$HULY_VERSION_FILE" "$BACKUP_TEMP_DIR/config/"
fi
# Backup entire /opt/huly-selfhost for any other files?
# cp -r "$HULY_CONFIG_DIR" "$BACKUP_TEMP_DIR/config_full_dir"
msg_ok "Huly configuration backed up."
msg_info "Backing up MongoDB database (huly)..."
# Ensure mongodump is available
if command -v mongodump >/dev/null 2>&1; then
if mongodump --db=huly --archive="$BACKUP_TEMP_DIR/mongodb/huly.gz" --gzip; then
msg_ok "MongoDB backup successful."
else
msg_warn "MongoDB backup (mongodump) failed. Archive might be incomplete."
fi
else
msg_warn "mongodump command not found. Skipping MongoDB backup."
fi
msg_info "Backing up MinIO data..."
if [ -d "$MINIO_DATA_DIR" ]; then
# Ensure MinIO service is stopped or data is consistent before copying
# For simplicity, assuming data is quiesced if services are stopped during a full restore scenario
# For live backup, MinIO has mc mirror/backup tools, but that adds complexity here.
# Tarring MinIO data to preserve permissions and structure within the backup.
tar -czf "$BACKUP_TEMP_DIR/minio_data/minio_data.tar.gz" -C "$(dirname "$MINIO_DATA_DIR")" "$(basename "$MINIO_DATA_DIR")"
msg_ok "MinIO data backed up."
else
msg_warn "MinIO data directory $MINIO_DATA_DIR not found. Skipping MinIO data backup."
fi
msg_info "Creating final backup archive: $FINAL_ARCHIVE_PATH"
if tar -czf "$FINAL_ARCHIVE_PATH" -C "/tmp" "$BACKUP_FILENAME_BASE"; then
msg_ok "Backup archive created successfully."
else
msg_error "Failed to create final backup archive."
rm -rf "$BACKUP_TEMP_DIR"
exit 1
fi
msg_info "Cleaning up temporary backup files..."
rm -rf "$BACKUP_TEMP_DIR"
msg_ok "Cleanup complete."
echo -e "${INFO} Backup created at: ${GN}$FINAL_ARCHIVE_PATH${CL}"
}
function restore_script() {
header_info
# check_container_storage (ensure enough space for restore)
ARCHIVE_DESTINATION="/root" # Same location backup_script writes to
LATEST_BACKUP=$(ls -t "$ARCHIVE_DESTINATION"/huly-native-backup-*.tar.gz 2>/dev/null | head -n1)
if [[ -z "$LATEST_BACKUP" ]]; then
msg_error "No Huly native backup archive found in $ARCHIVE_DESTINATION/!"
exit 1
fi
msg_info "Found latest backup: $LATEST_BACKUP"
RESTORE_TEMP_DIR="/tmp/huly-restore-extract-$(date +%s)"
mkdir -p "$RESTORE_TEMP_DIR"
msg_info "Extracting backup archive to $RESTORE_TEMP_DIR..."
if ! tar -xzf "$LATEST_BACKUP" -C "$RESTORE_TEMP_DIR"; then
msg_error "Failed to extract backup archive."
rm -rf "$RESTORE_TEMP_DIR"
exit 1
fi
# The archive was created with -C /tmp backup_base_name. So files are in $RESTORE_TEMP_DIR/backup_base_name/
EXTRACTED_CONTENT_DIR_NAME=$(ls "$RESTORE_TEMP_DIR")
ACTUAL_RESTORE_DATA_DIR="$RESTORE_TEMP_DIR/$EXTRACTED_CONTENT_DIR_NAME"
if [ ! -d "$ACTUAL_RESTORE_DATA_DIR/config" ]; then
msg_error "Extracted archive does not contain expected 'config' directory. Restore aborted."
rm -rf "$RESTORE_TEMP_DIR"
exit 1
fi
stop_all_services
msg_info "Restoring Huly configuration..."
# Restore native.conf, huly.creds, version.txt
cp -f "$ACTUAL_RESTORE_DATA_DIR/config/native.conf" "$HULY_CONFIG_FILE" 2>/dev/null || msg_warn "native.conf not found in backup."
cp -f "$ACTUAL_RESTORE_DATA_DIR/config/huly.creds" "$HULY_CREDS_FILE" 2>/dev/null || msg_warn "huly.creds not found in backup."
cp -f "$ACTUAL_RESTORE_DATA_DIR/config/version.txt" "$HULY_VERSION_FILE" 2>/dev/null || msg_warn "version.txt not found in backup."
# chown/chmod if necessary, e.g., chmod 600 $HULY_CREDS_FILE
msg_ok "Huly configuration restored."
msg_info "Restoring MongoDB database (huly)..."
if [ -f "$ACTUAL_RESTORE_DATA_DIR/mongodb/huly.gz" ]; then
if command -v mongorestore >/dev/null 2>&1; then
# Stop mongod if it was auto-restarted by systemd after the global stop
systemctl stop mongod 2>/dev/null || true
# It is critical that mongod is running for mongorestore to connect.
# However, we need to ensure it's clean. A --drop is often used.
# For simplicity, start it, restore, then it will be part of start_all_services.
systemctl start mongod
sleep 2 # Give MongoDB a moment to start
if mongorestore --db=huly --archive="$ACTUAL_RESTORE_DATA_DIR/mongodb/huly.gz" --gzip --drop; then
msg_ok "MongoDB restore successful."
else
msg_warn "MongoDB restore (mongorestore) failed. Check MongoDB logs."
fi
else
msg_warn "mongorestore command not found. Skipping MongoDB restore."
fi
else
msg_warn "MongoDB backup file (huly.gz) not found in archive. Skipping MongoDB restore."
fi
msg_info "Restoring MinIO data..."
if [ -f "$ACTUAL_RESTORE_DATA_DIR/minio_data/minio_data.tar.gz" ]; then
# Stop minio if it was auto-restarted
systemctl stop minio 2>/dev/null || true
rm -rf "${MINIO_DATA_DIR:?}/"* # Clear existing MinIO data
mkdir -p "$MINIO_DATA_DIR"
if tar -xzf "$ACTUAL_RESTORE_DATA_DIR/minio_data/minio_data.tar.gz" -C "$(dirname "$MINIO_DATA_DIR")"; then
# Ensure correct ownership for MinIO data dir (user minio)
chown -R minio:minio "$MINIO_DATA_DIR"
msg_ok "MinIO data restored."
else
msg_warn "Failed to extract MinIO data from archive."
fi
else
msg_warn "MinIO data archive (minio_data.tar.gz) not found in backup. Skipping MinIO data restore."
fi
# Note: Application files in /opt/huly/* are NOT part of this backup/restore.
# The update script is responsible for fetching/extracting these.
# This backup focuses on user data and configurations.
msg_info "Cleaning up temporary restore files..."
rm -rf "$RESTORE_TEMP_DIR"
msg_ok "Cleanup complete."
start_all_services
msg_ok "Restore completed from $LATEST_BACKUP. All services restarted."
}
# Standard Proxmox VE script functions (start, build_container, description)
start # This function is from build.func
build_container # This function is from build.func
description # This function is from build.func
# Final messages to user after CT creation
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} LXC container has been successfully created!${CL}"
echo -e "${INFO}${YW} Access Huly via Nginx reverse proxy at:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL} (or your configured domain name)"
if [ -f "$HULY_CREDS_FILE" ]; then
echo -e "${INFO}${YW} Initial admin credentials and keys are in: ${HULY_CREDS_FILE} (inside the CT). Secure them!${CL}"
fi
echo -e "${INFO}${YW} MinIO Console (if needed directly): http://${IP}:9001${CL}"
echo -e "${INFO}${YW} Refer to Huly documentation for first-time setup and usage: https://github.com/hcengineering/huly-selfhost${CL}"

104
ct/profilarr.sh Normal file
View File

@@ -0,0 +1,104 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2025 community-scripts ORG
# Author: GitHub Copilot
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/Dictionarry-Hub/profilarr
# Import Functions and Setup
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
APPLICATION="profilarr"
msg_info "Installing Dependencies"
$STD apt-get install -y \
curl \
wget \
git \
unzip \
build-essential \
python3 \
python3-pip \
python3-venv \
ca-certificates \
gnupg
msg_ok "Installed Dependencies"
msg_info "Installing Node.js"
$STD mkdir -p /etc/apt/keyrings
# Note: wrapping these pipelines in $STD would swallow the stream that gpg/tee need, so they run unwrapped.
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list >/dev/null
$STD apt-get update
$STD apt-get install -y nodejs
msg_ok "Installed Node.js"
msg_info "Setup ${APPLICATION}"
RELEASE=$(curl -fsSL https://api.github.com/repos/Dictionarry-Hub/profilarr/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
temp_file=$(mktemp)
$STD curl -fsSL -o "$temp_file" "https://github.com/Dictionarry-Hub/profilarr/archive/refs/tags/v${RELEASE}.zip"
cd /tmp
$STD unzip -q "$temp_file"
$STD mkdir -p /opt/${APPLICATION}
$STD mkdir -p /opt/${APPLICATION}_config
$STD mv "profilarr-${RELEASE}/backend" /opt/${APPLICATION}/
$STD mv "profilarr-${RELEASE}/frontend" /opt/${APPLICATION}/
$STD chown -R root:root /opt/${APPLICATION}
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Setup ${APPLICATION}"
msg_info "Setting up Python Virtual Environment"
$STD python3 -m venv /opt/${APPLICATION}/venv
$STD /opt/${APPLICATION}/venv/bin/pip install --upgrade pip
cd /opt/${APPLICATION}/backend
$STD /opt/${APPLICATION}/venv/bin/pip install -r requirements.txt
$STD /opt/${APPLICATION}/venv/bin/pip install gunicorn
msg_ok "Setup Python Environment"
msg_info "Building Frontend"
cd /opt/${APPLICATION}/frontend
$STD npm install
$STD npm run build
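# Ship the compiled assets into the backend's static directory so the Python app can serve the UI (assumes the backend serves files from app/static).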
$STD mkdir -p /opt/${APPLICATION}/backend/app/static
$STD cp -r dist/* /opt/${APPLICATION}/backend/app/static/
msg_ok "Built Frontend"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/${APPLICATION}.service
[Unit]
Description=Profilarr Profile Manager
After=network.target
[Service]
Type=simple
User=root
WorkingDirectory=/opt/${APPLICATION}/backend
Environment=PATH=/opt/${APPLICATION}/venv/bin
Environment=CONFIG_PATH=/opt/${APPLICATION}_config
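# gunicorn's module:callable() syntax invokes the app factory at startup; systemd ignores lines starting with '#'.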
ExecStart=/opt/${APPLICATION}/venv/bin/gunicorn --bind 0.0.0.0:6868 --workers 2 --timeout 120 app.main:create_app()
Restart=always
RestartSec=10
[Install]
WantedBy=multi-user.target
EOF
$STD systemctl daemon-reload
$STD systemctl enable ${APPLICATION}.service
$STD systemctl start ${APPLICATION}.service
msg_ok "Created Service"
motd_ssh
customize
msg_info "Cleaning up"
rm -f "$temp_file"
rm -rf "/tmp/profilarr-${RELEASE}"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@@ -1,10 +0,0 @@
node_modules/
data/
*.env
.gitignore
.github/
**Dockerfile**
.dockerignore

View File

@@ -1,13 +0,0 @@
# These are supported funding model platforms
github: vogler # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: fgc # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: vogler # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: vogler # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: ["https://www.buymeacoffee.com/vogler", "https://paypal.me/voglerr"] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

View File

@@ -1,28 +0,0 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "weekly"
    # commit-message:
    #   prefix: "npm"
    #   include: "scope"
  - package-ecosystem: "docker"
    directory: "/"
    schedule:
      interval: "weekly"
    # commit-message:
    #   prefix: "docker"
    #   include: "scope"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    # commit-message:
    #   prefix: "github-actions"
    #   include: "scope"

View File

@@ -1,75 +0,0 @@
name: Build and push Docker image (amd64, arm64 to hub.docker.com and ghcr.io)
on:
  workflow_dispatch: # allow manual trigger
  # https://github.com/orgs/community/discussions/26276
  push:
    branches:
      - "main"
      - "v*"
    tags:
      - "v*"
    paths: # ignore changes to certain files
      - '**'
      - '!*.md'
      # - '!.github/**'
  pull_request: # runs when opened/reopened or when the head branch is updated, see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
    branches:
      - "main" # only PRs against main
jobs:
  docker:
    runs-on: ubuntu-latest
    steps:
      -
        name: Checkout
        uses: actions/checkout@v4
      -
        name: Set environment variables
        run: |
          BRANCH="${GITHUB_REF#refs/heads/}"
          echo "BRANCH=$BRANCH" >> "$GITHUB_ENV"
          echo "NOW=$(date -R)" >> "$GITHUB_ENV" # date -Iseconds; date +'%Y-%m-%dT%H:%M:%S'
          if [[ "$BRANCH" == "main" ]]; then
            echo "IMAGE_TAG=latest" >> "$GITHUB_ENV"
          else
            echo "IMAGE_TAG=$BRANCH" >> "$GITHUB_ENV"
          fi
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      -
        name: Login to Docker Hub
        uses: docker/login-action@v3
        if: github.event_name != 'pull_request' # TODO if DOCKERHUB_* are set?
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }} # actor is user that opened PR, was repository_owner before
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Build and push
        uses: docker/build-push-action@v6
        # if: github.event_name != 'pull_request' # still want to build image
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }} # TODO push for forks?
          build-args: |
            COMMIT=${{ github.sha }}
            BRANCH=${{ env.BRANCH }}
            NOW=${{ env.NOW }}
          platforms: linux/amd64,linux/arm64 # ,linux/arm/v7
          # TODO docker tag only if DOCKERHUB_* are set?
          tags: |
            ${{ secrets.DOCKERHUB_USERNAME }}/free-games-claimer:${{env.IMAGE_TAG}}
            ghcr.io/${{ github.actor }}/free-games-claimer:${{env.IMAGE_TAG}}
          cache-from: type=gha
          cache-to: type=gha,mode=max

View File

@@ -1,36 +0,0 @@
# https://github.com/marketplace/actions/super-linter#get-started
name: Lint
on: # yamllint disable-line rule:truthy
  push: null
  pull_request: null
permissions: {}
jobs:
  lint:
    name: Lint
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: read
      # To report GitHub Actions status checks
      statuses: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          # super-linter needs the full git history to get the
          # list of files that changed across commits
          fetch-depth: 0
      - name: Super-linter
        uses: super-linter/super-linter/slim@v7.3.0 # x-release-please-version
        # TODO need to create problem matchers for each linter? https://github.com/rhysd/actionlint/blob/v1.7.7/docs/usage.md#problem-matchers
        env:
          # To report GitHub Actions status checks
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # TODO automatically fix linting issues and commit them for PRs
          # fix-lint-issues: # https://github.com/marketplace/actions/super-linter#github-actions-workflow-example-pull-request

View File

@@ -1,37 +0,0 @@
on:
  # Trigger analysis when pushing in main or pull requests, and when creating a pull request.
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened]
name: Sonar
jobs:
  sonarcloud:
    runs-on: ubuntu-latest
    steps:
      -
        uses: actions/checkout@v4
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting. Otherwise sonarcloud will show a warning.
          fetch-depth: 0
      -
        uses: actions/setup-node@v4
        with:
          cache: 'npm'
      -
        name: Install dev dependencies which include ESLint + plugins
        run: npm install --only=dev
      -
        name: Run ESLint
        continue-on-error: true
        run: npx eslint . -f json -o eslint_report.json
      -
        name: Fix ESLint paths
        run: sed -i 's+/home/runner/work/free-games-claimer/free-games-claimer+/github/workspace+g' eslint_report.json
      -
        name: SonarCloud Scan
        uses: sonarsource/sonarcloud-github-action@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}

View File

@@ -1,3 +0,0 @@
node_modules/
data/
*.env

View File

@@ -1,10 +0,0 @@
{
  // https://eslint.style/guide/faq#vs-code
  "editor.formatOnSave": true,
  "editor.formatOnSaveMode": "modifications",
  "editor.codeActionsOnSave": {
    "source.fixAll.eslint": "explicit"
  },
  "eslint.experimental.useFlatConfig": true,
  "eslint.codeActionsOnSave.rules": null,
}

View File

@@ -1,6 +0,0 @@
# Contribute
## Building and publishing docker images
Set up the DOCKERHUB_USERNAME and [DOCKERHUB_TOKEN](https://hub.docker.com/settings/security) secrets in https://github.com/YOUR_USERNAME/free-games-claimer/settings/secrets/actions to be able to run the docker.yml workflow.
Check that under Workflow Permissions in https://github.com/YOUR_USERNAME/free-games-claimer/settings/actions the radio button is set to "Read and write permissions"; otherwise the push to ghcr.io will fail.

View File

@@ -1,104 +0,0 @@
# FROM mcr.microsoft.com/playwright:v1.20.0
# Partially from https://github.com/microsoft/playwright/blob/main/utils/docker/Dockerfile.focal
FROM ubuntu:jammy
# Configuration variables are at the end!
# https://github.com/hadolint/hadolint/wiki/DL4006
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ARG DEBIAN_FRONTEND=noninteractive
# Install up-to-date node & npm, deps for virtual screen & noVNC, firefox, pip for apprise.
RUN apt-get update \
&& apt-get install --no-install-recommends -y curl ca-certificates gnupg \
&& mkdir -p /etc/apt/keyrings \
&& curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg \
&& echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
nodejs \
xvfb \
x11vnc \
tini \
novnc websockify \
dos2unix \
python3-pip \
# && npx playwright install-deps firefox \
&& apt-get install --no-install-recommends -y \
libgtk-3-0 \
libasound2 \
libxcomposite1 \
libpangocairo-1.0-0 \
libpango-1.0-0 \
libatk1.0-0 \
libcairo-gobject2 \
libcairo2 \
libgdk-pixbuf-2.0-0 \
libdbus-glib-1-2 \
libxcursor1 \
&& apt-get autoremove -y \
&& apt-get clean \
&& rm -rf \
/tmp/* \
/usr/share/doc/* \
/var/cache/* \
/var/lib/apt/lists/* \
/var/tmp/*
# RUN node --version
# RUN npm --version
RUN ln -s /usr/share/novnc/vnc_auto.html /usr/share/novnc/index.html
RUN pip install apprise
WORKDIR /fgc
COPY package*.json ./
# Playwright installs patched firefox to ~/.cache/ms-playwright/firefox-*
# Requires some system deps to run (see inlined install-deps above).
RUN npm install
# Old: PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD + install firefox (had to be done after `npm install` to get the correct version). Now: playwright-firefox as npm dep and `npm install` will only install that.
# From 1.38 Playwright will no longer install browser automatically for playwright, but apparently still for playwright-firefox: https://github.com/microsoft/playwright/releases/tag/v1.38.0
# RUN npx playwright install firefox
COPY . .
# Shell scripts need Linux line endings. On Windows, git might be configured to check out dos/CRLF line endings, so we convert them for those people in case they want to build the image. They could also use --config core.autocrlf=input
RUN dos2unix ./*.sh && chmod +x ./*.sh
COPY docker-entrypoint.sh /usr/local/bin/
ARG COMMIT=""
ARG BRANCH=""
ARG NOW=""
ENV COMMIT=${COMMIT}
ENV BRANCH=${BRANCH}
ENV NOW=${NOW}
LABEL org.opencontainers.image.title="free-games-claimer" \
org.opencontainers.image.name="free-games-claimer" \
org.opencontainers.image.description="Automatically claims free games on the Epic Games Store, Amazon Prime Gaming and GOG" \
org.opencontainers.image.url="https://github.com/vogler/free-games-claimer" \
org.opencontainers.image.source="https://github.com/vogler/free-games-claimer" \
org.opencontainers.image.revision=${COMMIT} \
org.opencontainers.image.ref.name=${BRANCH} \
org.opencontainers.image.base.name="ubuntu:jammy" \
org.opencontainers.image.version="latest"
# Configure VNC via environment variables:
ENV VNC_PORT 5900
ENV NOVNC_PORT 6080
EXPOSE 5900
EXPOSE 6080
# Configure Xvfb via environment variables:
ENV WIDTH 1920
ENV HEIGHT 1080
ENV DEPTH 24
# Show browser instead of running headless
ENV SHOW 1
# Script to setup display server & VNC is always executed.
ENTRYPOINT ["docker-entrypoint.sh"]
# Default command to run. This is replaced by appending own command, e.g. `docker run ... node prime-gaming` to only run this script.
CMD node epic-games; node prime-gaming; node gog

View File

@@ -1,661 +0,0 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.

View File

@ -1,224 +0,0 @@
<p align="center">
<img alt="logo-free-games-claimer" src="https://user-images.githubusercontent.com/493741/214588518-a4c89998-127e-4a8c-9b1e-ee4a9d075715.png" />
</p>
[![Code Smells](https://sonarcloud.io/api/project_badges/measure?project=vogler_free-games-claimer&metric=code_smells)](https://sonarcloud.io/project/overview?id=vogler_free-games-claimer)
# free-games-claimer
Claims free games periodically on
- <img src="https://github.com/user-attachments/assets/82e9e9bf-b6ac-4f20-91db-36d2c8429cb6" width="32" align="middle" /> [Epic Games Store](https://www.epicgames.com/store/free-games)
- <img src="https://github.com/user-attachments/assets/7627a108-20c6-4525-a1d8-5d221ee89d6e" width="32" align="middle" /> [Amazon Prime Gaming](https://gaming.amazon.com)
- <img src="https://github.com/user-attachments/assets/49040b50-ee14-4439-8e3c-e93cafd7c3a5" width="32" align="middle" /> [GOG](https://www.gog.com)
- <img src="https://github.com/user-attachments/assets/3582444b-f23b-448d-bf31-01668cd0313a" width="32" align="middle" /> [Unreal Engine (Assets)](https://www.unrealengine.com/marketplace/en-US/assets?count=20&sortBy=effectiveDate&sortDir=DESC&start=0&tag=4910) ([experimental](https://github.com/vogler/free-games-claimer/issues/44), same login as Epic Games)
<!-- - <img src="https://www.freepnglogos.com/uploads/xbox-logo-picture-png-14.png" width="32"/> [Xbox Live Games with Gold](https://www.xbox.com/en-US/live/gold#gameswithgold) ([experimental](https://github.com/vogler/free-games-claimer/issues/19)) -->
Pull requests welcome :)
![Telegram Screenshot](https://user-images.githubusercontent.com/493741/214667078-eb5c1877-2bdd-40c1-b94e-4a50d6852c06.png)
_Works on Windows/macOS/Linux._
Raspberry Pi (3, 4, Zero 2): [requires 64-bit OS](https://github.com/vogler/free-games-claimer/issues/3) like Raspberry Pi OS or Ubuntu (Raspbian won't work since it's 32-bit).
## How to run
Easy option: [install Docker](https://docs.docker.com/get-docker/) (or [podman](https://podman-desktop.io/)) and run this command in a terminal:
```
docker run --rm -it -p 6080:6080 -v fgc:/fgc/data --pull=always ghcr.io/vogler/free-games-claimer
```
_This currently gives you a captcha challenge for epic-games. Until [issue #183](https://github.com/vogler/free-games-claimer/issues/183) is fixed, it is recommended to just run `node epic-games` without docker (see below)._
This will run `node epic-games; node prime-gaming; node gog` - if you only want to claim games for one of the stores, you can override the default command by appending e.g. `node epic-games` at the end of the `docker run` command, or, if you want several stores, by appending e.g. `bash -c "node epic-games.js; node gog.js"`.
Data (including json files with claimed games, codes to redeem, screenshots) is stored in the Docker volume `fgc`.
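For example, to only claim Epic Games and GOG in one run, the default command could be overridden like this (a sketch based on the `docker run` command above; adjust ports and volume name as needed):
```
docker run --rm -it -p 6080:6080 -v fgc:/fgc/data --pull=always ghcr.io/vogler/free-games-claimer bash -c "node epic-games; node gog"
```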
<details>
<summary>I want to run without Docker or develop locally.</summary>
1. [Install Node.js](https://nodejs.org/en/download)
2. Clone/download this repository and `cd` into it in a terminal
3. Run `npm install`
4. Run `pip install apprise` (or use [pipx](https://github.com/pypa/pipx) if you have [problems](https://stackoverflow.com/questions/75608323/how-do-i-solve-error-externally-managed-environment-every-time-i-use-pip-3)) to install [apprise](https://github.com/caronc/apprise) if you want notifications
5. To get updates: `git pull; npm install`
6. Run `node epic-games`, `node prime-gaming`, `node gog`...
During `npm install` Playwright will download its Firefox to a cache in home ([doc](https://playwright.dev/docs/browsers#managing-browser-binaries)).
If you are missing some dependencies for the browser on your system, you can use `sudo npx playwright install firefox --with-deps`.
If you don't want to use Docker for quasi-headless mode, you could run inside a virtual machine, on a server, or wake your PC at night to avoid being interrupted.
</details>
## Usage
All scripts start an automated Firefox instance, either with the browser GUI shown or hidden (*headless mode*). By default, you won't see any browser open on your host system.
- When running inside Docker, the browser will be shown only inside the container. You can open http://localhost:6080 to interact with the browser running inside the container via noVNC (or use other VNC clients on port 5900).
- When running the scripts outside of Docker, the browser will be hidden by default; you can use `SHOW=1 ...` to show the UI (see options below).
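As a sketch, if you also want to connect with a regular VNC client (in addition to noVNC) when running in Docker, you could additionally publish port 5900 and set a password (the password value below is just a placeholder):
```
docker run --rm -it -p 6080:6080 -p 5900:5900 -e VNC_PASSWORD=changeme -v fgc:/fgc/data --pull=always ghcr.io/vogler/free-games-claimer
```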
When running the first time, you have to login for each store you want to claim games on.
You can log in indirectly via the terminal or directly in the browser. The scripts will wait until you are successfully logged in.
There will be prompts in the terminal asking you to enter email, password, and afterwards an OTP (one-time password / security code) if you have 2FA/MFA (two-/multi-factor authentication) enabled. If you want to log in yourself via the browser, you can press escape in the terminal to skip the prompts.
After login, the script will continue claiming the current games. If it still waits after you are already logged in, you can restart it (and open an issue). If you run the scripts regularly, you should not have to login again.
### Configuration / Options
Options are set via [environment variables](https://kinsta.com/knowledgebase/what-is-an-environment-variable/) which allow for flexible configuration.
TODO: ~~On the first run, the script will guide you through configuration and save all settings to `data/config.env`. You can edit this file directly or run `node fgc config` to run the configuration assistant again.~~
Available options/variables and their default values:
| Option | Default | Description |
|--------------- |--------- |------------------------------------------------------------------------ |
| SHOW | 1 | Show the browser if 1. Default inside Docker; hidden by default when running outside. |
| WIDTH | 1280 | Width of the opened browser (and of screen for VNC in Docker). |
| HEIGHT | 1280 | Height of the opened browser (and of screen for VNC in Docker). |
| VNC_PASSWORD | | VNC password for Docker. No password used by default! |
| NOTIFY | | Notification services to use (Pushover, Slack, Telegram...), see below. [Apprise](https://github.com/caronc/apprise) |
| NOTIFY_TITLE | | Optional title for notifications, e.g. for Pushover. |
| BROWSER_DIR | data/browser | Directory for browser profile, e.g. for multiple accounts. |
| TIMEOUT | 60 | Timeout for any page action. Should be fine even on slow machines. |
| LOGIN_TIMEOUT | 180 | Timeout for login in seconds. Will wait twice (prompt + manual login). |
| EMAIL | | Default email for any login. |
| PASSWORD | | Default password for any login. |
| EG_EMAIL | | Epic Games email for login. Overrides EMAIL. |
| EG_PASSWORD | | Epic Games password for login. Overrides PASSWORD. |
| EG_OTPKEY | | Epic Games MFA OTP key. |
| EG_PARENTALPIN | | Epic Games Parental Controls PIN. |
| PG_EMAIL | | Prime Gaming email for login. Overrides EMAIL. |
| PG_PASSWORD | | Prime Gaming password for login. Overrides PASSWORD. |
| PG_OTPKEY | | Prime Gaming MFA OTP key. |
| PG_REDEEM | 0 | Prime Gaming: try to redeem keys on external stores ([experimental](https://github.com/vogler/free-games-claimer/issues/5)). |
| PG_CLAIMDLC | 0 | Prime Gaming: try to claim DLCs ([experimental](https://github.com/vogler/free-games-claimer/issues/55)). |
| GOG_EMAIL | | GOG email for login. Overrides EMAIL. |
| GOG_PASSWORD | | GOG password for login. Overrides PASSWORD. |
| GOG_NEWSLETTER | 0 | Do not unsubscribe from newsletter after claiming a game if 1. |
| LG_EMAIL | | Legacy Games: email to use for redeeming (if not set, defaults to PG_EMAIL) |
See `src/config.js` for all options.
#### How to set options
You can add options directly in the command or put them in a file to load.
##### Docker
You can pass variables using `-e VAR=VAL`, for example `docker run -e EMAIL=foo@bar.baz -e NOTIFY='tgram://bottoken/ChatID' ...` or using `--env-file fgc.env` where `fgc.env` is a file on your host system (see [docs](https://docs.docker.com/engine/reference/commandline/run/#env)). You can also `docker cp` your configuration file to `/fgc/data/config.env` in the `fgc` volume to store it with the rest of the data instead of on the host ([example](https://github.com/moby/moby/issues/25245#issuecomment-365980572)).
If you are using [docker compose](https://docs.docker.com/compose/environment-variables/) (or Portainer etc.), you can put options in the `environment:` section.
##### Without Docker
On Linux/macOS you can prefix the variables you want to set, for example `EMAIL=foo@bar.baz SHOW=1 node epic-games` will show the browser and skip asking you for your login email. On Windows you have to use `set`, [example](https://github.com/vogler/free-games-claimer/issues/314).
You can also put options in `data/config.env` which will be loaded by [dotenv](https://github.com/motdotla/dotenv).
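As a sketch, a `data/config.env` could look like this (all values below are placeholders; only set the options you actually need):
```
# loaded by dotenv on startup, see src/config.js for all options
EMAIL=foo@bar.baz
NOTIFY=tgram://bottoken/ChatID
TIMEOUT=60
GOG_NEWSLETTER=0
```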
### Notifications
The scripts will try to send notifications for successfully claimed games and for any errors like needing to log in or encountering captchas (which should not happen).
[apprise](https://github.com/caronc/apprise) is used for notifications and offers many services including Pushover, Slack, Telegram, SMS, Email, desktop and custom notifications.
You just need to set `NOTIFY` to the notification services you want to use, e.g. `NOTIFY='mailto://myemail:mypass@gmail.com' 'pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b'` - refer to their list of services and [examples](https://github.com/caronc/apprise#command-line-usage).
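If you want to verify a notification URL before using it, you can (assuming you installed apprise as described above) test it directly from the command line, for example:
```
apprise -t 'free-games-claimer test' -b 'It works!' 'tgram://bottoken/ChatID'
```
The URL is a placeholder; use the same service URL(s) you would put into `NOTIFY`.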
### Automatic login, two-factor authentication
If you set the options for email, password and OTP key, there will be no prompts and logins should happen automatically. This is optional, since all stores should stay logged in as long as cookies are refreshed.
To get the OTP key, it is easiest to follow the store's guide for adding an authenticator app. You should also scan the shown QR code with your favorite app to have an alternative method for 2FA.
- **Epic Games**: visit [password & security](https://www.epicgames.com/account/password), enable 'third-party authenticator app', copy the 'Manual Entry Key' and use it to set `EG_OTPKEY`.
- **Prime Gaming**: visit Amazon 'Your Account' > 'Login & security' > '2-step verification' > 'Manage' > 'Add new app' > 'Can't scan the barcode', copy the bold key and use it to set `PG_OTPKEY`
- **GOG**: only offers OTP via email
<!-- - **Xbox**: visit [additional security](https://account.live.com/proofs/manage/additional) > Add a new way to sign in or verify > Use an app > Set up a different Authenticator app > I can't scan the bar code > copy the bold key and use it to set `XBOX_OTPKEY` -->
Beware that storing passwords and OTP keys as clear text may be a security risk. Use a unique/generated password! TODO: maybe at least offer to base64 encode for storage.
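Putting it together, a fully automatic Epic Games run on Linux/macOS could look like this sketch (all values are placeholders for illustration; see the options table above for the variables):
```
EG_EMAIL=foo@bar.baz EG_PASSWORD=mypassword EG_OTPKEY=ABCDEFGHIJKLMNOP node epic-games
```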
### Epic Games Store
Run `node epic-games` (locally or in Docker).
### Amazon Prime Gaming
Run `node prime-gaming` (locally or in Docker).
Claiming the Amazon Games works out of the box; however, for games on external stores you need to either link your account or redeem a key.
- Stores that require account linking: Epic Games, Battle.net, Origin.
- Stores that require redeeming a key: GOG.com, Microsoft Games, Legacy Games.
Keys and URLs are printed to the console, included in notifications and saved in `data/prime-gaming.json`. A screenshot of the page with the key is also saved to `data/screenshots`.
[TODO](https://github.com/vogler/free-games-claimer/issues/5): ~~redeem keys on external stores.~~
<!-- ### Xbox Games With Gold -->
<!-- Run `node xbox` (locally or in docker). -->
### Run periodically
#### How often?
Epic Games usually has two free games *every week*; before Christmas there is a new free game *every day*.
Prime Gaming has new games *every month* or more often during Prime days.
GOG usually has one new game every couple of weeks.
Unreal Engine has new assets to claim *every first Tuesday of a month*.
<!-- Xbox usually has two games *every month*. -->
It is safe to run the scripts every day.
#### How to schedule?
The container/scripts will claim currently available games and then exit.
If you want it to run regularly, you have to schedule the runs yourself:
- Linux/macOS: `crontab -e` ([example](https://github.com/vogler/free-games-claimer/discussions/56))
- macOS: [launchd](https://stackoverflow.com/questions/132955/how-do-i-set-a-task-to-run-every-so-often)
- Windows: [task scheduler](https://active-directory-wp.com/docs/Usage/How_to_add_a_cron_job_on_Windows/Scheduled_tasks_and_cron_jobs_on_Windows/index.html) ([example](https://github.com/vogler/free-games-claimer/wiki/%5BHowTo%5D-Schedule-runs-on-Windows)), [other options](https://stackoverflow.com/questions/132971/what-is-the-windows-version-of-cron), or just put the command in a `.bat` file in Autostart if you restart often...
- any OS: use a process manager like [pm2](https://pm2.keymetrics.io/docs/usage/restart-strategies/)
- Docker Compose: set `command: bash -c "node epic-games; node prime-gaming; node gog; echo sleeping; sleep 1d"` and additionally add `restart: unless-stopped` to the service.
TODO: ~~add some server-mode where the script just keeps running and claims games e.g. every day.~~
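For the Linux/macOS cron option above, a minimal sketch of a daily crontab entry could look like this (pick one line depending on whether you run with or without Docker; the repository path is an assumption and Node/Docker must be on cron's PATH):
```
# without Docker (assumes the repo is in ~/free-games-claimer)
0 6 * * * cd ~/free-games-claimer && { node epic-games; node prime-gaming; node gog; }
# with Docker
0 6 * * * docker run --rm -v fgc:/fgc/data --pull=always ghcr.io/vogler/free-games-claimer
```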
### Problems?
Check the open [issues](https://github.com/vogler/free-games-claimer/issues) and comment there or open a new issue.
If you're a developer, you can use `PWDEBUG=1 ...` to [inspect](https://playwright.dev/docs/inspector) which opens a debugger where you can step through the script.
## History/DevLog
<details>
<summary>Click to expand</summary>
Tried [epicgames-freebies-claimer](https://github.com/Revadike/epicgames-freebies-claimer), but had problems since epicgames introduced hcaptcha (see [issue](https://github.com/Revadike/epicgames-freebies-claimer/issues/172)).
Played around with puppeteer before, now trying newer https://playwright.dev which is pretty similar.
Playwright Inspector and `codegen` to generate scripts are nice, but failed to generate the right code for clicking a button in an iframe.
Added [main.spec.ts](https://github.com/vogler/epicgames-claimer/commit/e5ce7916ab6329cfc7134677c4d89c2b3fa3ba97#diff-d18d03e9c407a20e05fbf03cbd6f9299857740544fb6b50d6a70b9c6fbc35831) which was the test script generated by `npx playwright codegen` with manual fix for clicking buttons in the created iframe. Can be executed by `npx playwright test`. The test runner has options `--debug` and `--timeout` and can execute typescript which is nice. However, this only worked up to the button 'I Agree', and then showed an hcaptcha.
Added [main.captcha.js](https://github.com/vogler/epicgames-claimer/commit/e5ce7916ab6329cfc7134677c4d89c2b3fa3ba97#diff-d18d03e9c407a20e05fbf03cbd6f9299857740544fb6b50d6a70b9c6fbc35831) which uses beta of `playwright-extra@next` and `@extra/recaptcha@next` (from [comment on puppeteer-extra](https://github.com/berstend/puppeteer-extra/pull/303#issuecomment-775277480)).
However, `playwright-extra` seems to be old and missing `:has-text` selector (fixed [here](https://github.com/vogler/epicgames-claimer/commit/ba97a0e840b65f4476cca18e28d8461b0c703420)) and `page.frameLocator`, so the script did not run without adjustments.
Also, solving via [2captcha](https://2captcha.com?from=13225256) is a paid service which takes time and may be unreliable.
<!-- Alternative: https://anti-captcha.com -->
Added [main.stealth.js](https://github.com/vogler/epicgames-claimer/commit/64d0ba8ce71baec3947d1b64acd567befcb39340#diff-f70d3bd29df4a343f11062a97063953173491ce30fe34f69a0fc52517adbf342) which uses the stealth plugin without `playwright-extra` wrapper but up-to-date `playwright` (from [comment](https://github.com/berstend/puppeteer-extra/issues/454#issuecomment-917437212)).
The listed evasions are enough to not show an hcaptcha. Script claimed game successfully in non-headless mode.
Removed `main.captcha.js`.
Using Playwright Test (`main.spec.ts`) instead of Library (`main.stealth.js`) has the advantage of free CLI like `--debug` and `--timeout`.
<!-- TODO: check if stealth plugin can be setup with `contextOptions` ([doc](https://playwright.dev/docs/test-configuration#more-browser-and-context-options)). -->
Button selectors should preferably use text in order to be more stable against changes in the DOM.
Renamed repository from epicgames-claimer to free-games-claimer since a script for Amazon Prime Gaming was also added. Removed all old scripts in favor of just `epic-games.js` and `prime-gaming.js`.
epic games: `headless` mode gets hcaptcha challenge. More details/references in [issue](https://github.com/vogler/free-games-claimer/issues/2).
https://github.com/vogler/free-games-claimer/pull/11 introduced a Dockerfile for running non-headless inside the container via xvfb which makes it headless for the host running the container.
v1.0: Standalone scripts `node epic-games` and `node prime-gaming` using Chromium.
Changed to Firefox for all scripts since Chromium led to captchas. Claiming then also worked in headless mode without Docker.
Added options via env vars, configurable in `data/config.env`.
Added OTP generation via otplib for automatic login, even with 2FA.
Added notifications via [apprise](https://github.com/caronc/apprise).
</details>
[![Star History Chart](https://api.star-history.com/svg?repos=vogler/free-games-claimer&type=Date)](https://star-history.com/#vogler/free-games-claimer&Date)
<!-- [![Stargazers over time](https://starchart.cc/vogler/free-games-claimer.svg?variant=adaptive)](https://starchart.cc/vogler/free-games-claimer) -->
![Alt](https://repobeats.axiom.co/api/embed/a1c5e6e420d90e0d6b34c1285e92a69a44138faa.svg "Repobeats analytics image")
---
Logo with smaller aspect ratio (for Telegram bot etc.): 👾 - [emojipedia](https://emojipedia.org/alien-monster/)
![logo-fgc](https://user-images.githubusercontent.com/493741/214589922-093d6557-6393-421c-b577-da58ff3671bc.png)

View File

@ -1,124 +0,0 @@
import { firefox } from 'playwright-firefox'; // stealth plugin needs no outdated playwright-extra
import { datetime, filenamify, prompt, handleSIGINT, stealth } from './src/util.js';
import { cfg } from './src/config.js';
// using https://github.com/apify/fingerprint-suite worked, but has no launchPersistentContext...
// from https://github.com/apify/fingerprint-suite/issues/162
import { FingerprintInjector } from 'fingerprint-injector';
import { FingerprintGenerator } from 'fingerprint-generator';
const { fingerprint, headers } = new FingerprintGenerator().getFingerprint({
devices: ["mobile"],
operatingSystems: ["android"],
});
const context = await firefox.launchPersistentContext(cfg.dir.browser, {
headless: cfg.headless,
// viewport: { width: cfg.width, height: cfg.height },
locale: 'en-US', // ignore OS locale to be sure to have english text for locators -> done via /en in URL
recordVideo: cfg.record ? { dir: 'data/record/', size: { width: cfg.width, height: cfg.height } } : undefined, // will record a .webm video for each page navigated; without size, video would be scaled down to fit 800x800
recordHar: cfg.record ? { path: `data/record/aliexpress-${filenamify(datetime())}.har` } : undefined, // will record a HAR file with network requests and responses; can be imported in Chrome devtools
handleSIGINT: false, // have to handle ourselves and call context.close(), otherwise recordings from above won't be saved
userAgent: fingerprint.navigator.userAgent,
viewport: {
width: fingerprint.screen.width,
height: fingerprint.screen.height,
},
extraHTTPHeaders: {
'accept-language': headers['accept-language'],
},
});
handleSIGINT(context);
// await stealth(context);
await new FingerprintInjector().attachFingerprintToPlaywright(context, { fingerprint, headers });
context.setDefaultTimeout(cfg.debug ? 0 : cfg.timeout);
const page = context.pages().length ? context.pages()[0] : await context.newPage(); // should always exist
const auth = async (url) => {
console.log('auth', url);
await page.goto(url, { waitUntil: 'domcontentloaded' });
// redirects to https://login.aliexpress.com/?return_url=https%3A%2F%2Fwww.aliexpress.com%2Fp%2Fcoin-pc-index%2Findex.html
await Promise.any([page.waitForURL(/.*login.aliexpress.com.*/).then(async () => {
// manual login
console.error('Not logged in! Will wait for 120s for you to login...');
// await page.waitForTimeout(120*1000);
// or try automated
page.locator('span:has-text("Switch account")').click().catch(_ => {}); // sometimes no longer logged in, but previous user/email is pre-selected -> in this case we want to go back to the classic login
const login = page.locator('.login-container');
const email = cfg.ae_email || await prompt({ message: 'Enter email' });
const emailInput = login.locator('input[label="Email or phone number"]');
await emailInput.fill(email);
await emailInput.blur(); // otherwise Continue button stays disabled
const continueButton = login.locator('button:has-text("Continue")');
await continueButton.click({ force: true }); // normal click waits for button to no longer be covered by their suggestion menu, so we have to force click somewhere for the menu to close and then click
await continueButton.click();
const password = email && (cfg.ae_password || await prompt({ type: 'password', message: 'Enter password' }));
await login.locator('input[label="Password"]').fill(password);
await login.locator('button:has-text("Sign in")').click();
const error = login.locator('.error-text');
error.waitFor().then(async _ => console.error('Login error:', await error.innerText()));
await page.waitForURL(url);
// await page.addLocatorHandler(page.getByRole('button', { name: 'Accept cookies' }), btn => btn.click());
page.getByRole('button', { name: 'Accept cookies' }).click().then(_ => console.log('Accepted cookies')).catch(_ => { });
}), page.locator('#nav-user-account').waitFor()]).catch(_ => {});
// await page.locator('#nav-user-account').hover();
// console.log('Logged in as:', await page.locator('.welcome-name').innerText());
};
// copied URLs from AliExpress app on tablet which has menu for the used webview
const urls = {
// works with desktop view, but stuck at 100% loading in mobile view:
coins: 'https://www.aliexpress.com/p/coin-pc-index/index.html',
// only work with mobile view:
grow: 'https://m.aliexpress.com/p/ae_fruit/index.html', // firefox: stuck at 60% loading, chrome: loads, but canvas
gogo: 'https://m.aliexpress.com/p/gogo-match-cc/index.html', // closes firefox?!
// only show notification to install the app
euro: 'https://m.aliexpress.com/p/european-cup/index.html', // doesn't load
merge: 'https://m.aliexpress.com/p/merge-market/index.html',
};
const coins = async () => {
// await auth(urls.coins);
await Promise.any([page.locator('.checkin-button').click(), page.locator('.addcoin').waitFor()]);
console.log('Coins:', await page.locator('.mycoin-content-right-money').innerText());
console.log('Streak:', await page.locator('.title-box').innerText());
console.log('Tomorrow:', await page.locator('.addcoin').innerText());
};
const grow = async () => {
await page.pause();
};
const gogo = async () => {
await page.pause();
};
const euro = async () => {
await page.pause();
};
const merge = async () => {
await page.pause();
};
try {
// await coins();
await [
// coins,
// grow,
// gogo,
// euro,
merge,
].reduce((a, f) => a.then(async _ => { await auth(urls[f.name]); await f(); console.log() }), Promise.resolve());
// await page.pause();
} catch (error) {
process.exitCode ||= 1;
console.error('--- Exception:');
console.error(error); // .toString()?
}
if (page.video()) console.log('Recorded video:', await page.video().path());
await context.close();

View File

@ -1,15 +0,0 @@
# start with `docker compose up`
services:
free-games-claimer:
container_name: fgc # is printed in front of every output line
image: ghcr.io/vogler/free-games-claimer # otherwise image name will be free-games-claimer-free-games-claimer
build: .
ports:
# - "5900:5900" # VNC server
- "6080:6080" # noVNC (browser-based VNC client)
volumes:
- fgc:/fgc/data
# command: bash -c "node epic-games; node gog"
environment:
# - EMAIL=foo@bar.org
# - NOTIFY='tgram://...'

View File

@ -1,55 +0,0 @@
#!/usr/bin/env bash
set -eo pipefail # exit on error, error on any fail in pipe (not just last cmd); add -x to print each cmd; see gist bash_strict_mode.md
echo "Version: https://github.com/vogler/free-games-claimer/tree/${COMMIT}"
[ ! -z $BRANCH ] && [ $BRANCH != "main" ] && echo "Branch: ${BRANCH}"
echo "Build: $NOW"
# Remove chromium profile lock.
# When running in docker and then killing it, on the next run chromium displayed a dialog to unlock the profile which made the script time out.
# Maybe due to changed hostname of container or due to how the docker container kills playwright - didn't check.
# https://bugs.chromium.org/p/chromium/issues/detail?id=367048
rm -f /fgc/data/browser/SingletonLock
# Firefox preferences are stored in $BROWSER_DIR/pref.js and can be overridden by a file user.js
# Since this file has to be in the volume (data/browser), we can't do this in Dockerfile.
mkdir -p /fgc/data/browser
# fix for 'Incorrect response' after solving a captcha correctly - https://github.com/vogler/free-games-claimer/issues/261#issuecomment-1868385830
# echo 'user_pref("privacy.resistFingerprinting", true);' > /fgc/data/browser/user.js
cat << EOT > /fgc/data/browser/user.js
user_pref("privacy.resistFingerprinting", true);
// user_pref("privacy.resistFingerprinting.letterboxing", true);
// user_pref("browser.contentblocking.category", "strict");
// user_pref("webgl.disabled", true);
EOT
# TODO disable session restore message?
# Remove X server display lock, fix for `docker compose up` which reuses container which made it fail after initial run, https://github.com/vogler/free-games-claimer/issues/31
# echo $DISPLAY
# ls -l /tmp/.X11-unix/
rm -f /tmp/.X1-lock
# 6000+SERVERNUM is the TCP port Xvfb is listening on:
# SERVERNUM=$(echo "$DISPLAY" | sed 's/:\([0-9][0-9]*\).*/\1/')
# Options passed directly to the Xvfb server:
# -ac disables host-based access control mechanisms
# screen NUM WxHxD creates the screen and sets its width, height, and depth
export DISPLAY=:1 # need to export this, otherwise playwright complains with 'Looks like you launched a headed browser without having a XServer running.'
Xvfb $DISPLAY -ac -screen 0 "${WIDTH}x${HEIGHT}x${DEPTH}" &
echo "Xvfb display server created screen with resolution ${WIDTH}x${HEIGHT}"
if [ -z "$VNC_PASSWORD" ]; then
pw="-nopw"
pwt="no password!"
else
pw="-passwd $VNC_PASSWORD"
pwt="with password"
fi
x11vnc -display $DISPLAY -forever -shared -rfbport $VNC_PORT -bg $pw 2>/dev/null 1>&2
echo "VNC is running on port $VNC_PORT ($pwt)"
websockify -D --web "/usr/share/novnc/" $NOVNC_PORT "localhost:$VNC_PORT" 2>/dev/null 1>&2 &
echo "noVNC (VNC via browser) is running on http://localhost:$NOVNC_PORT"
echo
exec tini -g -- "$@" # https://github.com/krallin/tini/issues/8 node/playwright respond to signals like ctrl-c, but unsure about zombie processes

View File

@ -1,324 +0,0 @@
import { firefox } from 'playwright-firefox'; // stealth plugin needs no outdated playwright-extra
import { authenticator } from 'otplib';
import chalk from 'chalk';
import path from 'path';
import { existsSync, writeFileSync, appendFileSync } from 'fs';
import { resolve, jsonDb, datetime, stealth, filenamify, prompt, notify, html_game_list, handleSIGINT } from './src/util.js';
import { cfg } from './src/config.js';
const screenshot = (...a) => resolve(cfg.dir.screenshots, 'epic-games', ...a);
const URL_CLAIM = 'https://store.epicgames.com/en-US/free-games';
const URL_LOGIN = 'https://www.epicgames.com/id/login?lang=en-US&noHostRedirect=true&redirectUrl=' + URL_CLAIM;
console.log(datetime(), 'started checking epic-games');
const db = await jsonDb('epic-games.json', {});
if (cfg.time) console.time('startup');
const browserPrefs = path.join(cfg.dir.browser, 'prefs.js');
if (existsSync(browserPrefs)) {
console.log('Adding webgl.disabled to', browserPrefs);
appendFileSync(browserPrefs, 'user_pref("webgl.disabled", true);'); // apparently Firefox removes duplicates (and sorts), so no problem appending every time
} else {
console.log(browserPrefs, 'does not exist yet, will patch it on next run. Restart the script if you get a captcha.');
}
// https://playwright.dev/docs/auth#multi-factor-authentication
const context = await firefox.launchPersistentContext(cfg.dir.browser, {
headless: cfg.headless,
viewport: { width: cfg.width, height: cfg.height },
userAgent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0', // see replace of Headless in util.newStealthContext. TODO Windows UA enough to avoid 'device not supported'? update if browser is updated?
// userAgent firefox (macOS): Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:106.0) Gecko/20100101 Firefox/106.0
// userAgent firefox (docker): Mozilla/5.0 (X11; Linux aarch64; rv:109.0) Gecko/20100101 Firefox/115.0
locale: 'en-US', // ignore OS locale to be sure to have english text for locators
recordVideo: cfg.record ? { dir: 'data/record/', size: { width: cfg.width, height: cfg.height } } : undefined, // will record a .webm video for each page navigated; without size, video would be scaled down to fit 800x800
recordHar: cfg.record ? { path: `data/record/eg-${filenamify(datetime())}.har` } : undefined, // will record a HAR file with network requests and responses; can be imported in Chrome devtools
handleSIGINT: false, // have to handle ourselves and call context.close(), otherwise recordings from above won't be saved
// user settings for firefox have to be put in $BROWSER_DIR/user.js
args: [ // https://wiki.mozilla.org/Firefox/CommandLineOptions
// '-kiosk',
],
});
handleSIGINT(context);
// Without stealth plugin, the website shows an hcaptcha on login with username/password and in the last step of claiming a game. It may have other heuristics like unsuccessful logins as well. After <6h (TBD) it resets to no captcha again. Getting a new IP also resets.
await stealth(context);
if (!cfg.debug) context.setDefaultTimeout(cfg.timeout);
const page = context.pages().length ? context.pages()[0] : await context.newPage(); // should always exist
await page.setViewportSize({ width: cfg.width, height: cfg.height }); // TODO workaround for https://github.com/vogler/free-games-claimer/issues/277 until Playwright fixes it
// some debug info about the page (screen dimensions, user agent, platform)
// eslint-disable-next-line no-undef
if (cfg.debug) console.debug(await page.evaluate(() => [(({ width, height, availWidth, availHeight }) => ({ width, height, availWidth, availHeight }))(window.screen), navigator.userAgent, navigator.platform, navigator.vendor])); // deconstruct screen needed since `window.screen` prints {}, `window.screen.toString()` '[object Screen]', and can't use some pick function without defining it on `page`
if (cfg.debug_network) {
// const filter = _ => true;
const filter = r => r.url().includes('store.epicgames.com');
page.on('request', request => filter(request) && console.log('>>', request.method(), request.url()));
page.on('response', response => filter(response) && console.log('<<', response.status(), response.url()));
}
const notify_games = [];
let user;
try {
await context.addCookies([
{ name: 'OptanonAlertBoxClosed', value: new Date(Date.now() - 5 * 24 * 60 * 60 * 1000).toISOString(), domain: '.epicgames.com', path: '/' }, // Accept cookies to get rid of banner to save space on screen. Set accept time to 5 days ago.
{ name: 'HasAcceptedAgeGates', value: 'USK:9007199254740991,general:18,EPIC SUGGESTED RATING:18', domain: 'store.epicgames.com', path: '/' }, // gets rid of 'To continue, please provide your date of birth', https://github.com/vogler/free-games-claimer/issues/275, USK number doesn't seem to matter, cookie from 'Fallout 3: Game of the Year Edition'
]);
await page.goto(URL_CLAIM, { waitUntil: 'domcontentloaded' }); // 'domcontentloaded' faster than default 'load' https://playwright.dev/docs/api/class-page#page-goto
if (cfg.time) console.timeEnd('startup');
if (cfg.time) console.time('login');
// page.click('button:has-text("Accept All Cookies")').catch(_ => { }); // Not needed anymore since we set the cookie above. Clicking this did not always work since the message was animated in too slowly.
while (await page.locator('egs-navigation').getAttribute('isloggedin') != 'true') {
console.error('Not signed in anymore. Please login in the browser or here in the terminal.');
if (cfg.novnc_port) console.info(`Open http://localhost:${cfg.novnc_port} to login inside the docker container.`);
if (!cfg.debug) context.setDefaultTimeout(cfg.login_timeout); // give user some extra time to log in
console.info(`Login timeout is ${cfg.login_timeout / 1000} seconds!`);
await page.goto(URL_LOGIN, { waitUntil: 'domcontentloaded' });
if (cfg.eg_email && cfg.eg_password) console.info('Using email and password from environment.');
else console.info('Press ESC to skip the prompts if you want to login in the browser (not possible in headless mode).');
const notifyBrowserLogin = async () => {
console.log('Waiting for you to login in the browser.');
await notify('epic-games: no longer signed in and not enough options set for automatic login.');
if (cfg.headless) {
console.log('Run `SHOW=1 node epic-games` to login in the opened browser.');
await context.close(); // finishes potential recording
process.exit(1);
}
};
const email = cfg.eg_email || await prompt({ message: 'Enter email' });
if (!email) await notifyBrowserLogin();
else {
// await page.click('text=Sign in with Epic Games');
page.waitForSelector('.h_captcha_challenge iframe').then(async () => {
console.error('Got a captcha during login (likely due to too many attempts)! You may solve it in the browser, get a new IP or try again in a few hours.');
await notify('epic-games: got captcha during login. Please check.');
}).catch(_ => { });
page.waitForSelector('p:has-text("Incorrect response.")').then(async () => {
console.error('Incorrect response for captcha!');
}).catch(_ => { });
await page.fill('#email', email);
// await page.click('button[type="submit"]'); login was split in two steps for some time, now email and password are on the same form again
const password = email && (cfg.eg_password || await prompt({ type: 'password', message: 'Enter password' }));
if (!password) await notifyBrowserLogin();
else {
await page.fill('#password', password);
await page.click('button[type="submit"]');
}
const error = page.locator('#form-error-message');
error.waitFor().then(async () => {
console.error('Login error:', await error.innerText());
console.log('Please login in the browser!');
}).catch(_ => { });
// handle MFA, but don't await it
page.waitForURL('**/id/login/mfa**').then(async () => {
console.log('Enter the security code to continue - This appears to be a new device, browser or location. A security code has been sent to your email address at ...');
// TODO locator for text (email or app?)
const otp = cfg.eg_otpkey && authenticator.generate(cfg.eg_otpkey) || await prompt({ type: 'text', message: 'Enter two-factor sign in code', validate: n => n.toString().length == 6 || 'The code must be 6 digits!' }); // can't use type: 'number' since it strips away leading zeros and codes sometimes have them
await page.locator('input[name="code-input-0"]').pressSequentially(otp.toString());
await page.click('button[type="submit"]');
}).catch(_ => { });
}
await page.waitForURL(URL_CLAIM);
if (!cfg.debug) context.setDefaultTimeout(cfg.timeout);
}
user = await page.locator('egs-navigation').getAttribute('displayname'); // 'null' if !isloggedin
console.log(`Signed in as ${user}`);
db.data[user] ||= {};
if (cfg.time) console.timeEnd('login');
if (cfg.time) console.time('claim all games');
// Detect free games
const game_loc = page.locator('a:has(span:text-is("Free Now"))');
await game_loc.last().waitFor().catch(_ => {
// rarely there are no free games available -> catch Timeout
// TODO would be better to wait for alternative like 'coming soon' instead of waiting for timeout
// see https://github.com/vogler/free-games-claimer/issues/210#issuecomment-1727420943
console.error('Seems like currently there are no free games available in your region...');
// urls below should then be an empty list
});
// clicking on `game_sel` sometimes led to a 404, see https://github.com/vogler/free-games-claimer/issues/25
// debug showed that in those cases the href was still correct, so we `goto` the urls instead of clicking.
// Alternative: parse the json loaded to build the page https://store-site-backend-static-ipv4.ak.epicgames.com/freeGamesPromotions
// i.e. filter data.Catalog.searchStore.elements for .promotions.promotionalOffers being set and build URL with .catalogNs.mappings[0].pageSlug or .urlSlug if not set to some wrong id like it was the case for spirit-of-the-north-f58a66 - this is also what's done here: https://github.com/claabs/epicgames-freegames-node/blob/938a9653ffd08b8284ea32cf01ac8727d25c5d4c/src/puppet/free-games.ts#L138-L213
const urlSlugs = await Promise.all((await game_loc.elementHandles()).map(a => a.getAttribute('href')));
const urls = urlSlugs.map(s => 'https://store.epicgames.com' + s);
console.log('Free games:', urls);
for (const url of urls) {
if (cfg.time) console.time('claim game');
await page.goto(url); // , { waitUntil: 'domcontentloaded' });
const purchaseBtn = page.locator('button[data-testid="purchase-cta-button"] >> :has-text("e"), :has-text("i")').first(); // when loading, the button text is empty -> need to wait for some text {'get', 'in library', 'requires base game'} -> just wait for e or i to not be too specific; :text-matches("\w+") somehow didn't work - https://github.com/vogler/free-games-claimer/issues/375
await purchaseBtn.waitFor();
const btnText = (await purchaseBtn.innerText()).toLowerCase(); // barrier to block until page is loaded
// click Continue if 'This game contains mature content recommended only for ages 18+'
if (await page.locator('button:has-text("Continue")').count() > 0) {
console.log(' This game contains mature content recommended only for ages 18+');
if (await page.locator('[data-testid="AgeSelect"]').count()) {
console.error(' Got "To continue, please provide your date of birth" - This shouldn\'t happen due to cookie set above. Please report to https://github.com/vogler/free-games-claimer/issues/275');
await page.locator('#month_toggle').click();
await page.locator('#month_menu li:has-text("01")').click();
await page.locator('#day_toggle').click();
await page.locator('#day_menu li:has-text("01")').click();
await page.locator('#year_toggle').click();
await page.locator('#year_menu li:has-text("1987")').click();
}
await page.click('button:has-text("Continue")', { delay: 111 });
await page.waitForTimeout(2000);
}
let title;
let bundle_includes;
if (await page.locator('span:text-is("About Bundle")').count()) {
title = (await page.locator('span:has-text("Buy"):left-of([data-testid="purchase-cta-button"])').first().innerText()).replace('Buy ', '');
// h1 first didn't exist for bundles but now it does... However h1 would e.g. be 'Fallout® Classic Collection' instead of 'Fallout Classic Collection'
try {
bundle_includes = await Promise.all((await page.locator('.product-card-top-row h5').all()).map(b => b.innerText()));
} catch (e) {
console.error('Failed to get "Bundle Includes":', e);
}
} else {
title = await page.locator('h1').first().innerText();
}
const game_id = page.url().split('/').pop();
const existedInDb = db.data[user][game_id];
db.data[user][game_id] ||= { title, time: datetime(), url: page.url() }; // this will be set on the initial run only!
console.log('Current free game:', chalk.blue(title));
if (bundle_includes) console.log(' This bundle includes:', bundle_includes);
const notify_game = { title, url, status: 'failed' };
notify_games.push(notify_game); // status is updated below
if (btnText == 'in library') {
console.log(' Already in library! Nothing to claim.');
if (!existedInDb) await notify(`Game already in library: ${url}`);
notify_game.status = 'existed';
db.data[user][game_id].status ||= 'existed'; // does not overwrite claimed or failed
if (db.data[user][game_id].status.startsWith('failed')) db.data[user][game_id].status = 'manual'; // was failed but now it's claimed
} else if (btnText == 'requires base game') {
console.log(' Requires base game! Nothing to claim.');
notify_game.status = 'requires base game';
db.data[user][game_id].status ||= 'failed:requires-base-game';
// TODO claim base game if it is free
const baseUrl = 'https://store.epicgames.com' + await page.locator('a:has-text("Overview")').getAttribute('href');
console.log(' Base game:', baseUrl);
// await page.click('a:has-text("Overview")');
// TODO handle this via function call for base game above since this will never terminate if DRYRUN=1
urls.push(baseUrl); // add base game to the list of games to claim
urls.push(url); // add add-on itself again
} else { // GET
console.log(' Not in library yet! Click', btnText);
await purchaseBtn.click({ delay: 11 }); // got stuck here without delay (or mouse move), see #75, 1ms was also enough
// click Continue if 'Device not supported. This product is not compatible with your current device.' - avoided by Windows userAgent?
page.click('button:has-text("Continue")').catch(_ => { }); // needed since change from Chromium to Firefox?
// click 'Yes, buy now' if 'This edition contains something you already have. Still interested?'
page.click('button:has-text("Yes, buy now")').catch(_ => { });
// Accept End User License Agreement (only needed once)
page.locator(':has-text("end user license agreement")').waitFor().then(async () => {
console.log(' Accept End User License Agreement (only needed once)');
console.log(page.innerHTML);
console.log('Please report the HTML above here: https://github.com/vogler/free-games-claimer/issues/371');
await page.locator('input#agree').check(); // TODO Bundle: got stuck here; likely unrelated to bundle and locator just changed: https://github.com/vogler/free-games-claimer/issues/371
await page.locator('button:has-text("Accept")').click();
}).catch(_ => { });
// it then creates an iframe for the purchase
await page.waitForSelector('#webPurchaseContainer iframe'); // TODO needed?
const iframe = page.frameLocator('#webPurchaseContainer iframe');
// skip game if unavailable in region, https://github.com/vogler/free-games-claimer/issues/46 TODO check games for account's region
if (await iframe.locator(':has-text("unavailable in your region")').count() > 0) {
console.error(' This product is unavailable in your region!');
db.data[user][game_id].status = notify_game.status = 'unavailable-in-region';
if (cfg.time) console.timeEnd('claim game');
continue;
}
iframe.locator('.payment-pin-code').waitFor().then(async () => {
if (!cfg.eg_parentalpin) {
console.error(' EG_PARENTALPIN not set. Need to enter Parental Control PIN manually.');
notify('epic-games: EG_PARENTALPIN not set. Need to enter Parental Control PIN manually.');
}
await iframe.locator('input.payment-pin-code__input').first().pressSequentially(cfg.eg_parentalpin);
await iframe.locator('button:has-text("Continue")').click({ delay: 11 });
}).catch(_ => { });
if (cfg.debug) await page.pause();
if (cfg.dryrun) {
console.log(' DRYRUN=1 -> Skip order!');
notify_game.status = 'skipped';
if (cfg.time) console.timeEnd('claim game');
continue;
}
// Playwright clicked before button was ready to handle event, https://github.com/vogler/free-games-claimer/issues/84#issuecomment-1474346591
await iframe.locator('button:has-text("Place Order"):not(:has(.payment-loading--loading))').click({ delay: 11 });
// I Agree button is only shown for EU accounts! https://github.com/vogler/free-games-claimer/pull/7#issuecomment-1038964872
const btnAgree = iframe.locator('button:has-text("I Accept")');
btnAgree.waitFor().then(() => btnAgree.click()).catch(_ => { }); // EU: wait for and click 'I Agree'
try {
// context.setDefaultTimeout(100 * 1000); // give time to solve captcha, iframe goes blank after 60s?
const captcha = iframe.locator('#h_captcha_challenge_checkout_free_prod iframe');
captcha.waitFor().then(async () => { // don't await, since element may not be shown
// console.info(' Got hcaptcha challenge! NopeCHA extension will likely solve it.')
console.error(' Got hcaptcha challenge! Lost trust due to too many login attempts? You can solve the captcha in the browser or get a new IP address.');
// await notify(`epic-games: got captcha challenge right before claim of <a href="${url}">${title}</a>. Use VNC to solve it manually.`); // TODO not all apprise services understand HTML: https://github.com/vogler/free-games-claimer/pull/417
await notify(`epic-games: got captcha challenge for.\nGame link: ${url}`);
// TODO could even create purchase URL, see https://github.com/vogler/free-games-claimer/pull/130
// await page.waitForTimeout(2000);
// const p = path.resolve(cfg.dir.screenshots, 'epic-games', 'captcha', `${filenamify(datetime())}.png`);
// await captcha.screenshot({ path: p });
// console.info(' Saved a screenshot of hcaptcha challenge to', p);
// console.error(' Got hcaptcha challenge. To avoid it, get a link from https://www.hcaptcha.com/accessibility'); // TODO save this link in config and visit it daily to set accessibility cookie to avoid captcha challenge?
}).catch(_ => { }); // may time out if not shown
iframe.locator('.payment__errors:has-text("Failed to challenge captcha, please try again later.")').waitFor().then(async () => {
console.error(' Failed to challenge captcha, please try again later.');
await notify('epic-games: failed to challenge captcha. Please check.');
}).catch(_ => { });
await page.locator('text=Thanks for your order!').waitFor({ state: 'attached' }); // TODO Bundle: got stuck here, but normal game now as well
db.data[user][game_id].status = 'claimed';
db.data[user][game_id].time = datetime(); // claimed time overwrites failed/dryrun time
console.log(' Claimed successfully!');
// context.setDefaultTimeout(cfg.timeout);
} catch (e) {
console.log(e);
// console.error(' Failed to claim! Try again if NopeCHA timed out. Click the extension to see if you ran out of credits (refill after 24h). To avoid captchas try to get a new IP or set a cookie from https://www.hcaptcha.com/accessibility');
console.error(' Failed to claim! To avoid captchas try to get a new IP address.');
const p = screenshot('failed', `${game_id}_${filenamify(datetime())}.png`);
await page.screenshot({ path: p, fullPage: true });
db.data[user][game_id].status = 'failed';
}
notify_game.status = db.data[user][game_id].status; // claimed or failed
const p = screenshot(`${game_id}.png`);
if (!existsSync(p)) await page.screenshot({ path: p, fullPage: false }); // fullPage is quite long...
}
if (cfg.time) console.timeEnd('claim game');
}
if (cfg.time) console.timeEnd('claim all games');
} catch (error) {
process.exitCode ||= 1;
console.error('--- Exception:');
console.error(error); // .toString()?
if (error.message && process.exitCode != 130) notify(`epic-games failed: ${error.message.split('\n')[0]}`);
} finally {
await db.write(); // write out json db
if (notify_games.filter(g => g.status == 'claimed' || g.status == 'failed').length) { // don't notify if all have status 'existed', 'manual', 'requires base game', 'unavailable-in-region', 'skipped'
notify(`epic-games (${user}):<br>${html_game_list(notify_games)}`);
}
}
if (cfg.debug) writeFileSync(path.resolve(cfg.dir.browser, 'cookies.json'), JSON.stringify(await context.cookies()));
if (page.video()) console.log('Recorded video:', await page.video().path());
await context.close();

View File

@ -1,76 +0,0 @@
// https://eslint.org/docs/latest/use/configure/configuration-files-new
// https://eslint.org/docs/latest/use/configure/migration-guide
import js from '@eslint/js';
import globals from 'globals';
import stylistic from '@stylistic/eslint-plugin-js';
export default [
// https://eslint.org/docs/latest/use/configure/configuration-files-new#globally-ignoring-files-with-ignores
// object with just `ignores` applies to all configuration objects
// had `ln -s .gitignore .eslintignore` before, but .eslintignore no longer supported
{
ignores: ['data/**'],
},
js.configs.recommended, // TODO still needed?
{
// files: ['*.js'],
languageOptions: {
globals: globals.node,
},
plugins: {
'@stylistic/js': stylistic,
},
// https://eslint.org/docs/latest/rules/
// https://eslint.style/packages/js
rules: {
'no-unused-vars': ['error', { argsIgnorePattern: '^_' }],
'prefer-const': 'error',
'@stylistic/js/array-bracket-newline': ['error', 'consistent'],
'@stylistic/js/array-bracket-spacing': 'error',
'@stylistic/js/array-element-newline': ['error', 'consistent'],
'@stylistic/js/arrow-parens': ['error', 'as-needed'],
'@stylistic/js/arrow-spacing': 'error',
'@stylistic/js/block-spacing': 'error',
'@stylistic/js/brace-style': 'error',
'@stylistic/js/comma-dangle': ['error', 'always-multiline'],
'@stylistic/js/comma-spacing': 'error',
'@stylistic/js/comma-style': 'error',
'@stylistic/js/eol-last': 'error',
'@stylistic/js/func-call-spacing': 'error',
'@stylistic/js/function-paren-newline': ['error', 'consistent'],
'@stylistic/js/implicit-arrow-linebreak': 'error',
'@stylistic/js/indent': ['error', 2],
'@stylistic/js/key-spacing': 'error',
'@stylistic/js/keyword-spacing': 'error',
'@stylistic/js/linebreak-style': 'error',
'@stylistic/js/no-extra-parens': 'error',
'@stylistic/js/no-extra-semi': 'error',
'@stylistic/js/no-mixed-spaces-and-tabs': 'error',
'@stylistic/js/no-multi-spaces': 'error',
'@stylistic/js/no-multiple-empty-lines': 'error',
'@stylistic/js/no-tabs': 'error',
'@stylistic/js/no-trailing-spaces': 'error',
'@stylistic/js/no-whitespace-before-property': 'error',
'@stylistic/js/nonblock-statement-body-position': 'error',
'@stylistic/js/object-curly-newline': 'error',
'@stylistic/js/object-curly-spacing': ['error', 'always'],
'@stylistic/js/object-property-newline': ['error', { allowAllPropertiesOnSameLine: true }],
'@stylistic/js/quote-props': ['error', 'as-needed'],
'@stylistic/js/quotes': ['error', 'single'],
'@stylistic/js/rest-spread-spacing': 'error',
'@stylistic/js/semi': 'error',
'@stylistic/js/semi-spacing': 'error',
'@stylistic/js/semi-style': 'error',
'@stylistic/js/space-before-blocks': 'error',
'@stylistic/js/space-before-function-paren': ['error', { anonymous: 'never', named: 'never', asyncArrow: 'always' }],
'@stylistic/js/space-in-parens': 'error',
'@stylistic/js/space-infix-ops': 'error',
'@stylistic/js/space-unary-ops': 'error',
'@stylistic/js/spaced-comment': 'error',
'@stylistic/js/switch-colon-spacing': 'error',
'@stylistic/js/template-curly-spacing': 'error',
'@stylistic/js/template-tag-spacing': 'error',
'@stylistic/js/wrap-regex': 'error',
},
},
];

View File

@ -1,157 +0,0 @@
import { firefox } from 'playwright-firefox'; // stealth plugin needs no outdated playwright-extra
import chalk from 'chalk';
import { resolve, jsonDb, datetime, filenamify, prompt, notify, html_game_list, handleSIGINT } from './src/util.js';
import { cfg } from './src/config.js';
const screenshot = (...a) => resolve(cfg.dir.screenshots, 'gog', ...a);
const URL_CLAIM = 'https://www.gog.com/en';
console.log(datetime(), 'started checking gog');
const db = await jsonDb('gog.json', {});
if (cfg.width < 1280) { // otherwise 'Sign in' and #menuUsername are hidden (but attached to DOM), see https://github.com/vogler/free-games-claimer/issues/335
console.error(`Window width is set to ${cfg.width} but needs to be at least 1280 for GOG!`);
process.exit(1);
}
// https://playwright.dev/docs/auth#multi-factor-authentication
const context = await firefox.launchPersistentContext(cfg.dir.browser, {
headless: cfg.headless,
viewport: { width: cfg.width, height: cfg.height },
locale: 'en-US', // ignore OS locale to be sure to have english text for locators -> done via /en in URL
recordVideo: cfg.record ? { dir: 'data/record/', size: { width: cfg.width, height: cfg.height } } : undefined, // will record a .webm video for each page navigated; without size, video would be scaled down to fit 800x800
recordHar: cfg.record ? { path: `data/record/gog-${filenamify(datetime())}.har` } : undefined, // will record a HAR file with network requests and responses; can be imported in Chrome devtools
handleSIGINT: false, // have to handle ourselves and call context.close(), otherwise recordings from above won't be saved
});
handleSIGINT(context);
if (!cfg.debug) context.setDefaultTimeout(cfg.timeout);
const page = context.pages().length ? context.pages()[0] : await context.newPage(); // should always exist
await page.setViewportSize({ width: cfg.width, height: cfg.height }); // TODO workaround for https://github.com/vogler/free-games-claimer/issues/277 until Playwright fixes it
// console.debug('userAgent:', await page.evaluate(() => navigator.userAgent));
const notify_games = [];
let user;
try {
await context.addCookies([{ name: 'CookieConsent', value: '{stamp:%274oR8MJL+bxVlG6g+kl2we5+suMJ+Tv7I4C5d4k+YY4vrnhCD+P23RQ==%27%2Cnecessary:true%2Cpreferences:true%2Cstatistics:true%2Cmarketing:true%2Cmethod:%27explicit%27%2Cver:1%2Cutc:1672331618201%2Cregion:%27de%27}', domain: 'www.gog.com', path: '/' }]); // to not waste screen space when non-headless
await page.goto(URL_CLAIM, { waitUntil: 'domcontentloaded' }); // default 'load' takes forever
// page.click('#CybotCookiebotDialogBodyLevelButtonLevelOptinAllowAll').catch(_ => { }); // does not work reliably, solved by setting CookieConsent above
const signIn = page.locator('a:has-text("Sign in")').first();
await Promise.any([signIn.waitFor(), page.waitForSelector('#menuUsername')]);
while (await signIn.isVisible()) {
console.error('Not signed in anymore.');
await signIn.click();
// it then creates an iframe for the login
await page.waitForSelector('#GalaxyAccountsFrameContainer iframe'); // TODO needed?
const iframe = page.frameLocator('#GalaxyAccountsFrameContainer iframe');
if (!cfg.debug) context.setDefaultTimeout(cfg.login_timeout); // give user some extra time to log in
console.info(`Login timeout is ${cfg.login_timeout / 1000} seconds!`);
if (cfg.gog_email && cfg.gog_password) console.info('Using email and password from environment.');
else console.info('Press ESC to skip the prompts if you want to login in the browser (not possible in headless mode).');
const email = cfg.gog_email || await prompt({ message: 'Enter email' });
const password = email && (cfg.gog_password || await prompt({ type: 'password', message: 'Enter password' }));
if (email && password) {
iframe.locator('a[href="/logout"]').click().catch(_ => { }); // Click 'Change account' (email from previous login is set in some cookie)
await iframe.locator('#login_username').fill(email);
await iframe.locator('#login_password').fill(password);
await iframe.locator('#login_login').click();
// handle MFA, but don't await it
iframe.locator('form[name=second_step_authentication]').waitFor().then(async () => {
console.log('Two-Step Verification - Enter security code');
console.log(await iframe.locator('.form__description').innerText());
const otp = await prompt({ type: 'text', message: 'Enter two-factor sign in code', validate: n => n.toString().length == 4 || 'The code must be 4 digits!' }); // can't use type: 'number' since it strips away leading zeros and codes sometimes have them
await iframe.locator('#second_step_authentication_token_letter_1').pressSequentially(otp.toString(), { delay: 10 });
await iframe.locator('#second_step_authentication_send').click();
await page.waitForTimeout(1000); // TODO still needed with wait for username below?
}).catch(_ => { });
// iframe.locator('iframe[title=reCAPTCHA]').waitFor().then(() => {
// iframe.locator('.g-recaptcha').waitFor().then(() => {
iframe.locator('text=Invalid captcha').waitFor().then(() => {
console.error('Got a captcha during login (likely due to too many attempts)! You may solve it in the browser, get a new IP or try again in a few hours.');
notify('gog: got captcha during login. Please check.');
// TODO solve reCAPTCHA?
}).catch(_ => { });
await page.waitForSelector('#menuUsername');
} else {
console.log('Waiting for you to login in the browser.');
await notify('gog: no longer signed in and not enough options set for automatic login.');
if (cfg.headless) {
console.log('Run `SHOW=1 node gog` to login in the opened browser.');
await context.close();
process.exit(1);
}
}
await page.waitForSelector('#menuUsername');
if (!cfg.debug) context.setDefaultTimeout(cfg.timeout);
}
user = await page.locator('#menuUsername').first().textContent(); // innerText is uppercase due to styling!
console.log(`Signed in as ${user}`);
db.data[user] ||= {};
const banner = page.locator('#giveaway');
if (!await banner.count()) {
console.log('Currently no free giveaway!');
} else {
const text = await page.locator('.giveaway__content-header').innerText();
const match_all = text.match(/Claim (.*) and don't miss the|Success! (.*) was added to/);
const title = match_all[1] ? match_all[1] : match_all[2];
const url = await banner.locator('a').first().getAttribute('href');
console.log(`Current free game: ${chalk.blue(title)} - ${url}`);
db.data[user][title] ||= { title, time: datetime(), url };
if (cfg.dryrun) process.exit(1);
// await page.locator('#giveaway:not(.is-loading)').waitFor(); // otherwise screenshot is sometimes with loading indicator instead of game title; #TODO fix, skipped due to timeout, see #240
await banner.screenshot({ path: screenshot(`${filenamify(title)}.png`) }); // overwrites every time - only keep first?
// await banner.getByRole('button', { name: 'Add to library' }).click();
// instead of clicking the button, we visit the auto-claim URL, which returns a JSON response that is easier to check than the state of a button
await page.goto('https://www.gog.com/giveaway/claim');
const response = await page.innerText('body');
// console.log(response);
// {} // when successfully claimed
// {"message":"Already claimed"}
// {"message":"Unauthorized"}
// {"message":"Giveaway has ended"}
let status;
if (response == '{}') {
status = 'claimed';
console.log(' Claimed successfully!');
} else {
const message = JSON.parse(response).message;
if (message == 'Already claimed') {
status = 'existed'; // same status text as for epic-games
console.log(' Already in library! Nothing to claim.');
} else {
console.log(response);
status = message;
}
}
db.data[user][title].status ||= status;
notify_games.push({ title, url, status });
if (status == 'claimed' && !cfg.gog_newsletter) {
console.log('Unsubscribe from \'Promotions and hot deals\' newsletter');
await page.goto('https://www.gog.com/en/account/settings/subscriptions');
await page.locator('li:has-text("Marketing communications through Trusted Partners") label').uncheck();
await page.locator('li:has-text("Promotions and hot deals") label').uncheck();
}
}
} catch (error) {
process.exitCode ||= 1;
console.error('--- Exception:');
console.error(error); // .toString()?
if (error.message && process.exitCode != 130) notify(`gog failed: ${error.message.split('\n')[0]}`);
} finally {
await db.write(); // write out json db
if (notify_games.filter(g => g.status != 'existed').length) { // don't notify if all were already claimed
notify(`gog (${user}):<br>${html_game_list(notify_games)}`);
}
}
if (page.video()) console.log('Recorded video:', await page.video().path());
await context.close();

View File

@ -1,9 +0,0 @@
{
"compilerOptions": {
"checkJs": true,
"target": "es2021",
"module": "NodeNext",
"moduleResolution": "NodeNext", // https://github.com/typicode/lowdb/issues/554
},
"exclude": ["node_modules", "**/node_modules"]
}

File diff suppressed because it is too large

View File

@ -1,37 +0,0 @@
{
"name": "free-games-claimer",
"version": "1.4.0",
"description": "Automatically claims free games on the Epic Games Store, Amazon Prime Gaming and GOG.",
"homepage": "https://github.com/vogler/free-games-claimer",
"repository": {
"type": "git",
"url": "https://github.com/vogler/free-games-claimer.git"
},
"author": "Ralf Vogler",
"license": "AGPL-3.0-only",
"main": "index.js",
"scripts": {
"docker:build": "docker build . -t ghcr.io/vogler/free-games-claimer",
"docker": "cross-env-shell docker run --rm -it -p 5900:5900 -p 6080:6080 -v \\\"$INIT_CWD/data\\\":/fgc/data --name fgc ghcr.io/vogler/free-games-claimer",
"lint": "npx eslint ."
},
"type": "module",
"engines": {
"node": ">=17"
},
"dependencies": {
"chalk": "^5.3.0",
"cross-env": "^7.0.3",
"dotenv": "^16.4.5",
"enquirer": "^2.4.1",
"fingerprint-injector": "^2.1.52",
"lowdb": "^7.0.1",
"otplib": "^12.0.1",
"playwright-firefox": "^1.45.0",
"puppeteer-extra-plugin-stealth": "^2.11.2"
},
"devDependencies": {
"@stylistic/eslint-plugin-js": "^4.0.0",
"eslint": "^9.5.0"
}
}

View File

@ -1,443 +0,0 @@
import { firefox } from 'playwright-firefox'; // stealth plugin needs no outdated playwright-extra
import { authenticator } from 'otplib';
import chalk from 'chalk';
import { resolve, jsonDb, datetime, stealth, filenamify, prompt, confirm, notify, html_game_list, handleSIGINT } from './src/util.js';
import { cfg } from './src/config.js';
const screenshot = (...a) => resolve(cfg.dir.screenshots, 'prime-gaming', ...a);
// const URL_LOGIN = 'https://www.amazon.de/ap/signin'; // wrong. needs some session args to be valid?
const URL_CLAIM = 'https://gaming.amazon.com/home';
console.log(datetime(), 'started checking prime-gaming');
const db = await jsonDb('prime-gaming.json', {});
// https://playwright.dev/docs/auth#multi-factor-authentication
const context = await firefox.launchPersistentContext(cfg.dir.browser, {
headless: cfg.headless,
viewport: { width: cfg.width, height: cfg.height },
locale: 'en-US', // ignore OS locale to be sure to have english text for locators
recordVideo: cfg.record ? { dir: 'data/record/', size: { width: cfg.width, height: cfg.height } } : undefined, // will record a .webm video for each page navigated; without size, video would be scaled down to fit 800x800
recordHar: cfg.record ? { path: `data/record/pg-${filenamify(datetime())}.har` } : undefined, // will record a HAR file with network requests and responses; can be imported in Chrome devtools
handleSIGINT: false, // have to handle ourselves and call context.close(), otherwise recordings from above won't be saved
});
handleSIGINT(context);
// TODO test if needed
await stealth(context);
if (!cfg.debug) context.setDefaultTimeout(cfg.timeout);
const page = context.pages().length ? context.pages()[0] : await context.newPage(); // should always exist
await page.setViewportSize({ width: cfg.width, height: cfg.height }); // TODO workaround for https://github.com/vogler/free-games-claimer/issues/277 until Playwright fixes it
// console.debug('userAgent:', await page.evaluate(() => navigator.userAgent));
const notify_games = [];
let user;
try {
await page.goto(URL_CLAIM, { waitUntil: 'domcontentloaded' }); // default 'load' takes forever
// need to wait for some elements to exist before checking if signed in or accepting cookies:
await Promise.any(['button:has-text("Sign in")', '[data-a-target="user-dropdown-first-name-text"]'].map(s => page.waitForSelector(s)));
page.click('[aria-label="Cookies usage disclaimer banner"] button:has-text("Accept Cookies")').catch(_ => { }); // to not waste screen space when non-headless, TODO does not work reliably, need to wait for something else first?
while (await page.locator('button:has-text("Sign in")').count() > 0) {
console.error('Not signed in anymore.');
await page.click('button:has-text("Sign in")');
if (!cfg.debug) context.setDefaultTimeout(cfg.login_timeout); // give user some extra time to log in
console.info(`Login timeout is ${cfg.login_timeout / 1000} seconds!`);
if (cfg.pg_email && cfg.pg_password) console.info('Using email and password from environment.');
else console.info('Press ESC to skip the prompts if you want to login in the browser (not possible in headless mode).');
const email = cfg.pg_email || await prompt({ message: 'Enter email' });
const password = email && (cfg.pg_password || await prompt({ type: 'password', message: 'Enter password' }));
if (email && password) {
await page.fill('[name=email]', email);
await page.click('input[type="submit"]');
await page.fill('[name=password]', password);
// await page.check('[name=rememberMe]'); // no longer exists
await page.click('input[type="submit"]');
page.waitForURL('**/ap/signin**').then(async () => { // check for wrong credentials
const error = await page.locator('.a-alert-content').first().innerText();
if (!error.trim().length) return; // no visible error text, nothing to report
console.error('Login error:', error);
await notify(`prime-gaming: login: ${error}`);
await context.close(); // finishes potential recording
process.exit(1);
});
// handle MFA, but don't await it
page.waitForURL('**/ap/mfa**').then(async () => {
console.log('Two-Step Verification - enter the One Time Password (OTP), e.g. generated by your Authenticator App');
await page.check('[name=rememberDevice]');
const otp = cfg.pg_otpkey && authenticator.generate(cfg.pg_otpkey) || await prompt({ type: 'text', message: 'Enter two-factor sign in code', validate: n => n.toString().length == 6 || 'The code must be 6 digits!' }); // can't use type: 'number' since it strips away leading zeros and codes sometimes have them
await page.locator('input[name=otpCode]').pressSequentially(otp.toString());
await page.click('input[type="submit"]');
}).catch(_ => { });
} else {
console.log('Waiting for you to login in the browser.');
await notify('prime-gaming: no longer signed in and not enough options set for automatic login.');
if (cfg.headless) {
console.log('Run `SHOW=1 node prime-gaming` to login in the opened browser.');
await context.close(); // finishes potential recording
process.exit(1);
}
}
await page.waitForURL('https://gaming.amazon.com/home?signedIn=true');
if (!cfg.debug) context.setDefaultTimeout(cfg.timeout);
}
user = await page.locator('[data-a-target="user-dropdown-first-name-text"]').first().innerText();
console.log(`Signed in as ${user}`);
// await page.click('button[aria-label="User dropdown and more options"]');
// const twitch = await page.locator('[data-a-target="TwitchDisplayName"]').first().innerText();
// console.log(`Twitch user name is ${twitch}`);
db.data[user] ||= {};
if (await page.getByRole('button', { name: 'Try Prime' }).count()) {
console.error('User is currently not an Amazon Prime member, so no games to claim. Exit!');
await context.close();
process.exit(1);
}
const waitUntilStable = async (f, act) => {
let v;
while (true) {
const v2 = await f();
console.log('waitUntilStable', v2);
if (v == v2) break;
v = v2;
await act();
}
};
const scrollUntilStable = async f => await waitUntilStable(f, async () => {
// await page.keyboard.press('End'); // scroll to bottom to show all games
// loading all games became flaky; see https://github.com/vogler/free-games-claimer/issues/357
await page.keyboard.press('PageDown'); // scrolling straight to the bottom started to skip loading some games
await page.waitForLoadState('networkidle'); // wait for all games to be loaded
await page.waitForTimeout(3000); // TODO networkidle wasn't enough to load all already collected games
// do it again since once wasn't enough...
await page.keyboard.press('PageDown');
await page.waitForTimeout(3000);
});
await page.click('button[data-type="Game"]');
const games = page.locator('div[data-a-target="offer-list-FGWP_FULL"]');
await games.waitFor();
// await scrollUntilStable(() => games.locator('.item-card__action').count()); // number of games
await scrollUntilStable(() => page.evaluate(() => document.querySelector('.tw-full-width').scrollHeight)); // height may change during loading while number of games is still the same?
console.log('Number of already claimed games (total):', await games.locator('p:has-text("Collected")').count());
// can't use .all() since the list of elements via locator will change after click while we iterate over it
const internal = await games.locator('.item-card__action:has(button[data-a-target="FGWPOffer"])').elementHandles();
const external = await games.locator('.item-card__action:has(a[data-a-target="FGWPOffer"])').all();
// bottom to top: oldest to newest games
internal.reverse();
external.reverse();
const sameOrNewPage = async url => new Promise(async (resolve, _reject) => {
const isNew = page.url() != url;
let p = page;
if (isNew) {
p = await context.newPage();
await p.goto(url, { waitUntil: 'domcontentloaded' });
}
resolve([p, isNew]);
});
const skipBasedOnTime = async url => {
// console.log(' Checking time left for game:', url);
const [p, isNew] = await sameOrNewPage(url);
const dueDateOrg = await p.locator('.availability-date .tw-bold').innerText();
const dueDate = new Date(Date.parse(dueDateOrg + ' 17:00'));
const daysLeft = (dueDate.getTime() - Date.now())/1000/60/60/24;
console.log(' ', await p.locator('.availability-date').innerText(), '->', daysLeft.toFixed(2));
if (isNew) await p.close();
return daysLeft > cfg.pg_timeLeft;
};
console.log('\nNumber of free unclaimed games (Prime Gaming):', internal.length);
// claim games in internal store
for (const card of internal) {
await card.scrollIntoViewIfNeeded();
const title = await (await card.$('.item-card-details__body__primary')).innerText();
const slug = await (await card.$('a')).getAttribute('href');
const url = 'https://gaming.amazon.com' + slug.split('?')[0];
console.log('Current free game:', chalk.blue(title));
if (cfg.pg_timeLeft && await skipBasedOnTime(url)) continue;
if (cfg.dryrun) continue;
if (cfg.interactive && !await confirm()) continue;
await (await card.$('.tw-button:has-text("Claim")')).click();
db.data[user][title] ||= { title, time: datetime(), url, store: 'internal' };
notify_games.push({ title, status: 'claimed', url });
// const img = await (await card.$('img.tw-image')).getAttribute('src');
// console.log('Image:', img);
await card.screenshot({ path: screenshot('internal', `${filenamify(title)}.png`) });
}
console.log('\nNumber of free unclaimed games (external stores):', external.length);
// claim games in external/linked stores. Linked: origin.com, epicgames.com; Redeem-key: gog.com, legacygames.com, microsoft
const external_info = [];
for (const card of external) { // need to get data incl. URLs in this loop and then navigate in another, otherwise .all() would update after coming back and .elementHandles() like above would lead to error due to page navigation: elementHandle.$: Protocol error (Page.adoptNode)
const title = await card.locator('.item-card-details__body__primary').innerText();
const slug = await card.locator('a:has-text("Claim")').first().getAttribute('href');
const url = 'https://gaming.amazon.com' + slug.split('?')[0];
// await (await card.$('text=Claim')).click(); // goes to URL of game, no need to wait
external_info.push({ title, url });
}
// external_info = [ { title: 'Fallout 76 (XBOX)', url: 'https://gaming.amazon.com/fallout-76-xbox-fgwp/dp/amzn1.pg.item.9fe17d7b-b6c2-4f58-b494-cc4e79528d0b?ingress=amzn&ref_=SM_Fallout76XBOX_S01_FGWP_CRWN' } ];
for (const { title, url } of external_info) {
console.log('Current free game:', chalk.blue(title)); // , url);
await page.goto(url, { waitUntil: 'domcontentloaded' });
if (cfg.debug) await page.pause();
const item_text = await page.innerText('[data-a-target="DescriptionItemDetails"]');
const store = item_text.toLowerCase().replace(/.* on /, '').slice(0, -1);
console.log(' External store:', store);
if (cfg.pg_timeLeft && await skipBasedOnTime(url)) continue;
if (cfg.dryrun) continue;
if (cfg.interactive && !await confirm()) continue;
await Promise.any([page.click('[data-a-target="buy-box"] .tw-button:has-text("Get game")'), page.click('[data-a-target="buy-box"] .tw-button:has-text("Claim")'), page.click('.tw-button:has-text("Complete Claim")'), page.waitForSelector('div:has-text("Link game account")'), page.waitForSelector('.thank-you-title:has-text("Success")')]); // waits for navigation
db.data[user][title] ||= { title, time: datetime(), url, store };
const notify_game = { title, url };
notify_games.push(notify_game); // status is updated below
if (await page.locator('div:has-text("Link game account")').count() // TODO still needed? epic games store just has 'Link account' as the button text now.
|| await page.locator('div:has-text("Link account")').count()) {
console.error(' Account linking is required to claim this offer!');
notify_game.status = `failed: need account linking for ${store}`;
db.data[user][title].status = 'failed: need account linking';
// await page.pause();
// await page.click('[data-a-target="LinkAccountModal"] [data-a-target="LinkAccountButton"]');
// TODO login for epic games also needed if already logged in
// wait for https://www.epicgames.com/id/authorize?redirect_uri=https%3A%2F%2Fservice.link.amazon.gg...
// await page.click('button[aria-label="Allow"]');
} else {
db.data[user][title].status = 'claimed';
// print code if there is one
const redeem = {
// 'origin': 'https://www.origin.com/redeem', // TODO still needed or now only via account linking?
'gog.com': 'https://www.gog.com/redeem',
'microsoft store': 'https://account.microsoft.com/billing/redeem',
xbox: 'https://account.microsoft.com/billing/redeem',
'legacy games': 'https://www.legacygames.com/primedeal',
};
if (store in redeem) { // did not work for linked origin: && !await page.locator('div:has-text("Successfully Claimed")').count()
const code = await Promise.any([page.inputValue('input[type="text"]'), page.textContent('[data-a-target="ClaimStateClaimCodeContent"]').then(s => s.replace('Your code: ', ''))]); // input: Legacy Games; text: gog.com
console.log(' Code to redeem game:', chalk.blue(code));
if (store == 'legacy games') { // may be different URL like https://legacygames.com/primeday/puzzleoftheyear/
redeem[store] = await (await page.$('li:has-text("Click here") a')).getAttribute('href'); // full text: Click here to enter your redemption code.
}
let redeem_url = redeem[store];
if (store == 'gog.com') redeem_url += '/' + code; // to log and notify, but can't use for goto below (captcha)
console.log(' URL to redeem game:', redeem_url);
db.data[user][title].code = code;
let redeem_action = 'redeem';
if (cfg.pg_redeem) { // try to redeem keys on external stores
console.log(` Trying to redeem ${code} on ${store} (need to be logged in)!`);
const page2 = await context.newPage();
await page2.goto(redeem[store], { waitUntil: 'domcontentloaded' });
if (store == 'gog.com') {
// await page.goto(`https://redeem.gog.com/v1/bonusCodes/${code}`); // {"reason":"Invalid or no captcha"}
await page2.fill('#codeInput', code);
// wait for responses before clicking on Continue and then Redeem
// first there are requests with OPTIONS and GET to https://redeem.gog.com/v1/bonusCodes/XYZ?language=de-DE
const r1 = page2.waitForResponse(r => r.request().method() == 'GET' && r.url().startsWith('https://redeem.gog.com/'));
await page2.click('[type="submit"]'); // click Continue
// console.log(await page2.locator('.warning-message').innerText()); // does not exist if there is no warning
const r1t = await (await r1).text();
const reason = JSON.parse(r1t).reason;
// {"reason":"Invalid or no captcha"}
// {"reason":"code_used"}
// {"reason":"code_not_found"}
if (reason?.includes('captcha')) {
redeem_action = 'redeem (got captcha)';
console.error(' Got captcha; could not redeem!');
} else if (reason == 'code_used') {
redeem_action = 'already redeemed';
console.log(' Code was already used!');
} else if (reason == 'code_not_found') {
redeem_action = 'redeem (not found)';
console.error(' Code was not found!');
} else { // TODO not logged in? need valid unused code to test.
redeem_action = 'redeemed?';
// console.log(' Redeemed successfully? Please report your Responses (if new) in https://github.com/vogler/free-games-claimer/issues/5');
console.debug(` Response 1: ${r1t}`);
// then after the click on Redeem there is a POST request which should return {} if claimed successfully
const r2 = page2.waitForResponse(r => r.request().method() == 'POST' && r.url().startsWith('https://redeem.gog.com/'));
await page2.click('[type="submit"]'); // click Redeem
const r2t = await (await r2).text();
const reason2 = JSON.parse(r2t).reason;
if (r2t == '{}') {
redeem_action = 'redeemed';
console.log(' Redeemed successfully.');
db.data[user][title].status = 'claimed and redeemed';
} else if (reason2?.includes('captcha')) {
redeem_action = 'redeem (got captcha)';
console.error(' Got captcha; could not redeem!');
} else {
console.debug(` Response 2: ${r2t}`);
console.log(' Unknown Response 2 - please report in https://github.com/vogler/free-games-claimer/issues/5');
}
}
} else if (store == 'microsoft store' || store == 'xbox') {
console.error(` Redeem on ${store} is experimental!`);
// await page2.pause();
if (page2.url().startsWith('https://login.')) {
console.error(' Not logged in! Please redeem the code above manually. You can now login in the browser for next time. Waiting for 60s.');
await page2.waitForTimeout(60 * 1000);
redeem_action = 'redeem (login)';
} else {
const iframe = page2.frameLocator('#redeem-iframe');
const input = iframe.locator('[name=tokenString]');
await input.waitFor();
await input.fill(code);
const r = page2.waitForResponse(r => r.url().startsWith('https://cart.production.store-web.dynamics.com/v1.0/Redeem/PrepareRedeem'));
// console.log(await page2.locator('.redeem_code_error').innerText());
const rt = await (await r).text();
// {"code":"NotFound","data":[],"details":[],"innererror":{"code":"TokenNotFound",...
const j = JSON.parse(rt);
const reason = j?.events?.cart.length && j.events.cart[0]?.data?.reason;
if (reason == 'TokenNotFound') {
redeem_action = 'redeem (not found)';
console.error(' Code was not found!');
} else if (j?.productInfos?.length && j.productInfos[0]?.redeemable) {
await iframe.locator('button:has-text("Next")').click();
await iframe.locator('button:has-text("Confirm")').click();
const r = page2.waitForResponse(r => r.url().startsWith('https://cart.production.store-web.dynamics.com/v1.0/Redeem/RedeemToken'));
const j = JSON.parse(await (await r).text());
if (j?.events?.cart.length && j.events.cart[0]?.data?.reason == 'UserAlreadyOwnsContent') {
redeem_action = 'already redeemed';
console.error(' error: UserAlreadyOwnsContent');
} else if (true) { // TODO what's returned on success?
redeem_action = 'redeemed';
db.data[user][title].status = 'claimed and redeemed?';
console.log(' Redeemed successfully? Please report if not in https://github.com/vogler/free-games-claimer/issues/5');
}
} else { // TODO find out other responses
redeem_action = 'unknown';
console.debug(` Response: ${rt}`);
console.log(' Redeemed successfully? Please report your Response from above (if it is new) in https://github.com/vogler/free-games-claimer/issues/5');
}
}
} else if (store == 'legacy games') {
// await page2.pause();
await page2.fill('[name=coupon_code]', code);
await page2.fill('[name=email]', cfg.lg_email);
await page2.fill('[name=email_validate]', cfg.lg_email);
await page2.uncheck('[name=newsletter_sub]');
await page2.click('[type="submit"]');
try {
// await page2.waitForResponse(r => r.url().startsWith('https://promo.legacygames.com/promotion-processing/order-management.php')); // status code 302
await page2.waitForSelector('h2:has-text("Thanks for redeeming")');
redeem_action = 'redeemed';
db.data[user][title].status = 'claimed and redeemed';
} catch (error) {
console.error(' Got error', error);
redeem_action = 'redeemed?';
db.data[user][title].status = 'claimed and redeemed?';
console.log(' Redeemed successfully? Please report problems in https://github.com/vogler/free-games-claimer/issues/5');
}
} else {
console.error(` Redeem on ${store} not yet implemented!`);
}
if (cfg.debug) await page2.pause();
await page2.close();
}
notify_game.status = `<a href="${redeem_url}">${redeem_action}</a> ${code} on ${store}`;
} else {
notify_game.status = `claimed on ${store}`;
db.data[user][title].status = 'claimed';
}
// save screenshot of potential code just in case
await page.screenshot({ path: screenshot('external', `${filenamify(title)}.png`), fullPage: true });
// console.info(' Saved a screenshot of page to', p);
}
// await page.pause();
}
await page.goto(URL_CLAIM, { waitUntil: 'domcontentloaded' });
await page.click('button[data-type="Game"]');
if (notify_games.length) { // make screenshot of all games if something was claimed
const p = screenshot(`${filenamify(datetime())}.png`);
// await page.screenshot({ path: p, fullPage: true }); // fullPage does not make a difference since scroll not on body but on some element
await scrollUntilStable(() => games.locator('.item-card__action').count());
const viewportSize = page.viewportSize(); // current viewport size
await page.setViewportSize({ ...viewportSize, height: 3000 }); // increase height, otherwise element screenshot is cut off at the top and bottom
await games.screenshot({ path: p }); // screenshot of all claimed games
}
// https://github.com/vogler/free-games-claimer/issues/55
if (cfg.pg_claimdlc) {
console.log('Trying to claim in-game content...');
await page.click('button[data-type="InGameLoot"]');
const loot = page.locator('div[data-a-target="offer-list-IN_GAME_LOOT"]');
await loot.waitFor();
process.stdout.write('Loading all DLCs on page...');
await scrollUntilStable(() => loot.locator('[data-a-target="item-card"]').count());
console.log('\nNumber of already claimed DLC:', await loot.locator('p:has-text("Collected")').count());
const cards = await loot.locator('[data-a-target="item-card"]:has(p:text-is("Claim"))').all();
console.log('Number of unclaimed DLC:', cards.length);
const dlcs = await Promise.all(cards.map(async card => ({
game: await card.locator('.item-card-details__body p').innerText(),
title: await card.locator('.item-card-details__body__primary').innerText(),
url: 'https://gaming.amazon.com' + await card.locator('a').first().getAttribute('href'),
})));
// console.log(dlcs);
const dlc_unlinked = {};
for (const dlc of dlcs) {
const title = `${dlc.game} - ${dlc.title}`;
const url = dlc.url;
console.log('Current DLC:', title);
if (cfg.debug) await page.pause();
if (cfg.dryrun) continue;
if (cfg.interactive && !await confirm()) continue;
db.data[user][title] ||= { title, time: datetime(), store: 'DLC', status: 'failed: need account linking' };
const notify_game = { title, url };
notify_games.push(notify_game); // status is updated below
try {
await page.goto(url, { waitUntil: 'domcontentloaded' });
// most games have a button 'Get in-game content'
// epic-games: Fall Guys: Claim -> Continue -> Go to Epic Games (despite account linked and logged into epic-games) -> not tied to account but via some cookie?
await Promise.any([page.click('.tw-button:has-text("Get in-game content")'), page.click('.tw-button:has-text("Claim your gift")'), page.click('.tw-button:has-text("Claim")').then(() => page.click('button:has-text("Continue")'))]);
page.click('button:has-text("Continue")').catch(_ => { });
const linkAccountButton = page.locator('[data-a-target="LinkAccountButton"]');
let unlinked_store;
if (await linkAccountButton.count()) {
unlinked_store = await linkAccountButton.first().getAttribute('aria-label');
console.debug(' LinkAccountButton label:', unlinked_store);
const match = unlinked_store.match(/Link (.*) account/);
if (match && match.length == 2) unlinked_store = match[1];
} else if (await page.locator('text=Link game account').count()) { // epic-games only?
console.error(' Missing account linking (epic-games specific button?):', await page.locator('button[data-a-target="gms-cta"]').innerText()); // TODO needed?
unlinked_store = 'epic-games';
}
if (unlinked_store) {
console.error(' Missing account linking:', unlinked_store, url);
dlc_unlinked[unlinked_store] ??= [];
dlc_unlinked[unlinked_store].push(title);
} else {
const code = await page.inputValue('input[type="text"]').catch(_ => undefined);
console.log(' Code to redeem game:', chalk.blue(code));
db.data[user][title].code = code;
db.data[user][title].status = 'claimed';
// notify_game.status = `<a href="${redeem[store]}">${redeem_action}</a> ${code} on ${store}`;
}
// await page.pause();
} catch (error) {
console.error(error);
} finally {
await page.goto(URL_CLAIM, { waitUntil: 'domcontentloaded' });
await page.click('button[data-type="InGameLoot"]');
}
}
console.log('DLC: Unlinked accounts:', dlc_unlinked);
}
} catch (error) {
process.exitCode ||= 1;
console.error('--- Exception:');
console.error(error); // .toString()?
if (error.message && process.exitCode != 130) notify(`prime-gaming failed: ${error.message.split('\n')[0]}`);
} finally {
await db.write(); // write out json db
if (notify_games.length) { // list should only include claimed games
notify(`prime-gaming (${user}):<br>${html_game_list(notify_games)}`);
}
}
if (page.video()) console.log('Recorded video:', await page.video().path());
await context.close();

View File

@ -1,6 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended"
]
}

View File

@ -1,9 +0,0 @@
sonar.organization=vogler
sonar.projectKey=vogler_free-games-claimer
# relative paths to source directories. More details and properties are described
# in https://sonarcloud.io/documentation/project-administration/narrowing-the-focus/
sonar.sources=.
#Eslint issues
sonar.eslint.reportPaths = eslint_report.json

View File

@ -1,53 +0,0 @@
import * as dotenv from 'dotenv';
import { dataDir } from './util.js';
dotenv.config({ path: 'data/config.env' }); // loads env vars from file - will not set vars that are already set, i.e., can overwrite values from file by prefixing, e.g., VAR=VAL node ...
// Options - also see table in README.md
export const cfg = {
debug: process.env.DEBUG == '1' || process.env.PWDEBUG == '1', // runs non-headless and opens https://playwright.dev/docs/inspector
debug_network: process.env.DEBUG_NETWORK == '1', // log network requests and responses
record: process.env.RECORD == '1', // `recordHar` (network) + `recordVideo`
time: process.env.TIME == '1', // log duration of each step
dryrun: process.env.DRYRUN == '1', // don't claim anything
interactive: process.env.INTERACTIVE == '1', // confirm to claim, default skip
show: process.env.SHOW == '1', // run non-headless
get headless() {
return !this.debug && !this.show;
},
width: Number(process.env.WIDTH) || 1920, // width of the opened browser
height: Number(process.env.HEIGHT) || 1080, // height of the opened browser
timeout: (Number(process.env.TIMEOUT) || 60) * 1000, // default timeout for playwright is 30s
login_timeout: (Number(process.env.LOGIN_TIMEOUT) || 180) * 1000, // higher timeout for login, will wait twice: prompt + wait for manual login
novnc_port: process.env.NOVNC_PORT, // running in docker if set
notify: process.env.NOTIFY, // apprise notification services
notify_title: process.env.NOTIFY_TITLE, // apprise notification title
get dir() { // avoids ReferenceError: Cannot access 'dataDir' before initialization
return {
browser: process.env.BROWSER_DIR || dataDir('browser'), // for multiple accounts or testing
screenshots: process.env.SCREENSHOTS_DIR || dataDir('screenshots'), // set to 0 to disable screenshots
};
},
// auth epic-games
eg_email: process.env.EG_EMAIL || process.env.EMAIL,
eg_password: process.env.EG_PASSWORD || process.env.PASSWORD,
eg_otpkey: process.env.EG_OTPKEY,
eg_parentalpin: process.env.EG_PARENTALPIN,
// auth prime-gaming
pg_email: process.env.PG_EMAIL || process.env.EMAIL,
pg_password: process.env.PG_PASSWORD || process.env.PASSWORD,
pg_otpkey: process.env.PG_OTPKEY,
// auth gog
gog_email: process.env.GOG_EMAIL || process.env.EMAIL,
gog_password: process.env.GOG_PASSWORD || process.env.PASSWORD,
gog_newsletter: process.env.GOG_NEWSLETTER == '1', // do not unsubscribe from newsletter after claiming a game
// auth AliExpress
ae_email: process.env.AE_EMAIL || process.env.EMAIL,
ae_password: process.env.AE_PASSWORD || process.env.PASSWORD,
// OTP only via GOG_EMAIL, can't add app...
// experimental
pg_redeem: process.env.PG_REDEEM == '1', // prime-gaming: redeem keys on external stores
lg_email: process.env.LG_EMAIL || process.env.PG_EMAIL || process.env.EMAIL, // prime-gaming: external: legacy-games: email to use for redeeming
pg_claimdlc: process.env.PG_CLAIMDLC == '1', // prime-gaming: claim in-game content
pg_timeLeft: Number(process.env.PG_TIMELEFT), // prime-gaming: check time left to claim and skip game if there are more than PG_TIMELEFT days left to claim it
};

View File

@ -1,33 +0,0 @@
import { existsSync } from 'fs';
import { Low } from 'lowdb';
import { JSONFile } from 'lowdb/node';
import { datetime } from './util.js';
const datetime_UTCtoLocalTimezone = async file => {
if (!existsSync(file)) return console.error('File does not exist:', file);
const db = new Low(new JSONFile(file));
await db.read();
db.data ||= {};
console.log('Migrating', file);
for (const user in db.data) {
for (const game in db.data[user]) {
const time1 = db.data[user][game].time;
const time1s = time1.endsWith('Z') ? time1 : time1 + ' UTC';
const time2 = datetime(new Date(time1s));
console.log([game, time1, time2]);
db.data[user][game].time = time2;
}
}
// console.log(db.data);
await db.write(); // write out json db
};
const args = process.argv.slice(2);
if (args[0] == 'localtime') {
const files = args.slice(1);
console.log('Will convert UTC datetime to local timezone for', files);
files.forEach(datetime_UTCtoLocalTimezone);
} else {
console.log('Usage: node migrate.js <cmd> <args>');
console.log(' node migrate.js localtime data/*.json');
}

View File

@ -1,138 +0,0 @@
// https://stackoverflow.com/questions/46745014/alternative-for-dirname-in-node-js-when-using-es6-modules
import path from 'node:path';
import { fileURLToPath } from 'node:url';
// not the same since these will give the absolute paths for this file instead of for the file using them
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// explicit object instead of Object.fromEntries since the built-in type would lose the keys, better type: https://dev.to/svehla/typescript-object-fromentries-389c
export const dataDir = s => path.resolve(__dirname, '..', 'data', s);
// modified path.resolve to return null if first argument is '0', used to disable screenshots
export const resolve = (...a) => a.length && a[0] == '0' ? null : path.resolve(...a);
// json database
import { JSONFilePreset } from 'lowdb/node';
export const jsonDb = (file, defaultData) => JSONFilePreset(dataDir(file), defaultData);
export const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
// date and time as UTC (no timezone offset) in nicely readable and sortable format, e.g., 2022-10-06 12:05:27.313
export const datetimeUTC = (d = new Date()) => d.toISOString().replace('T', ' ').replace('Z', '');
// same as datetimeUTC() but for local timezone, e.g., UTC + 2h for the above in DE
export const datetime = (d = new Date()) => datetimeUTC(new Date(d.getTime() - d.getTimezoneOffset() * 60000));
export const filenamify = s => s.replaceAll(':', '.').replace(/[^a-z0-9 _\-.]/gi, '_'); // alternative: https://www.npmjs.com/package/filenamify - On Unix-like systems, / is reserved. On Windows, <>:"/\|?* along with trailing periods are reserved.
export const handleSIGINT = (context = null) => process.on('SIGINT', async () => { // e.g. when killed by Ctrl-C
console.error('\nInterrupted by SIGINT. Exit!'); // Exception shows where the script was:\n'); // killed before catch in docker...
process.exitCode = 130; // 128+SIGINT to indicate to parent that process was killed
if (context) await context.close(); // in order to save recordings also on SIGINT, we need to disable Playwright's handleSIGINT and close the context ourselves
});
export const launchChromium = async options => {
const { chromium } = await import('playwright-chromium'); // stealth plugin needs no outdated playwright-extra
// https://www.nopecha.com extension source from https://github.com/NopeCHA/NopeCHA/releases/tag/0.1.16
// const ext = path.resolve('nopecha'); // used in Chromium, currently not needed in Firefox
const context = chromium.launchPersistentContext(cfg.dir.browser, {
// chrome will not work in linux arm64, only chromium
// channel: 'chrome', // https://playwright.dev/docs/browsers#google-chrome--microsoft-edge
args: [ // https://peter.sh/experiments/chromium-command-line-switches
// don't want to see bubble 'Restore pages? Chrome didn't shut down correctly.'
// '--restore-last-session', // does not apply for crash/killed
'--hide-crash-restore-bubble',
// `--disable-extensions-except=${ext}`,
// `--load-extension=${ext}`,
],
// ignoreDefaultArgs: ['--enable-automation'], // remove default arg that shows the info bar with 'Chrome is being controlled by automated test software.'. Since Chromium 106 this instead shows another info bar with 'You are using an unsupported command-line flag: --no-sandbox. Stability and security will suffer.'.
...options,
});
return context;
};
export const stealth = async context => {
// stealth with playwright: https://github.com/berstend/puppeteer-extra/issues/454#issuecomment-917437212
// https://github.com/berstend/puppeteer-extra/tree/master/packages/puppeteer-extra-plugin-stealth/evasions
const enabledEvasions = [
'chrome.app',
'chrome.csi',
'chrome.loadTimes',
'chrome.runtime',
// 'defaultArgs',
'iframe.contentWindow',
'media.codecs',
'navigator.hardwareConcurrency',
'navigator.languages',
'navigator.permissions',
'navigator.plugins',
// 'navigator.vendor',
'navigator.webdriver',
'sourceurl',
// 'user-agent-override', // doesn't work since playwright has no page.browser()
'webgl.vendor',
'window.outerdimensions',
];
const stealth = {
callbacks: [],
async evaluateOnNewDocument(...args) {
this.callbacks.push({ cb: args[0], a: args[1] });
},
};
for (const e of enabledEvasions) {
const evasion = await import(`puppeteer-extra-plugin-stealth/evasions/${e}/index.js`);
evasion.default().onPageCreated(stealth);
}
for (const evasion of stealth.callbacks) {
await context.addInitScript(evasion.cb, evasion.a);
}
};
// used prompts before, but couldn't cancel prompt
// alternative inquirer is big (node_modules 29MB, enquirer 9.7MB, prompts 9.8MB, none 9.4MB) and slower
// open issue: prevents handleSIGINT() from working if the prompt is cancelled with Ctrl-C instead of Escape: https://github.com/enquirer/enquirer/issues/372
import Enquirer from 'enquirer'; const enquirer = new Enquirer();
const timeoutPlugin = timeout => enquirer => { // cancel prompt after timeout ms
enquirer.on('prompt', prompt => {
const t = setTimeout(() => {
prompt.hint = () => 'timeout';
prompt.cancel();
}, timeout);
prompt.on('submit', _ => clearTimeout(t));
prompt.on('cancel', _ => clearTimeout(t));
});
};
enquirer.use(timeoutPlugin(cfg.login_timeout)); // TODO may not want to have this timeout for all prompts; better extend Prompt and add a timeout prompt option
// single prompt that just returns the non-empty value instead of an object
// @ts-ignore
export const prompt = o => enquirer.prompt({ name: 'name', type: 'input', message: 'Enter value', ...o }).then(r => r.name).catch(_ => {});
export const confirm = o => prompt({ type: 'confirm', message: 'Continue?', ...o });
// notifications via apprise CLI
import { execFile } from 'child_process';
import { cfg } from './config.js';
export const notify = html => new Promise((resolve, reject) => {
if (!cfg.notify) {
if (cfg.debug) console.debug('notify: NOTIFY is not set!');
return resolve();
}
// const cmd = `apprise '${cfg.notify}' ${title} -i html -b '${html}'`; // this had problems if e.g. ' was used in arg; could have `npm i shell-escape`, but instead using safer execFile which takes args as array instead of exec which spawned a shell to execute the command
const args = [cfg.notify, '-i', 'html', '-b', `'${html}'`];
if (cfg.notify_title) args.push(...['-t', cfg.notify_title]);
if (cfg.debug) console.debug(`apprise ${args.map(a => `'${a}'`).join(' ')}`); // this also doesn't escape, but it's just for info
execFile('apprise', args, (error, stdout, stderr) => {
if (error) {
console.log(`error: ${error.message}`);
if (error.message.includes('command not found')) {
console.info('Run `pip install apprise`. See https://github.com/vogler/free-games-claimer#notifications');
}
return reject(error);
}
if (stderr) console.error(`stderr: ${stderr}`);
if (stdout) console.log(`stdout: ${stdout}`);
resolve();
});
});
export const escapeHtml = unsafe => unsafe.replaceAll('&', '&amp;').replaceAll('<', '&lt;').replaceAll('>', '&gt;').replaceAll('"', '&quot;').replaceAll('\'', '&#039;');
export const html_game_list = games => games.map(g => `- <a href="${g.url}">${escapeHtml(g.title)}</a> (${g.status})`).join('<br>');

View File

@ -1,49 +0,0 @@
// check if running the latest version
import { log } from 'console';
import { exec } from 'child_process';
const execp = cmd => new Promise((resolve, reject) => {
exec(cmd, (error, stdout, stderr) => {
if (stderr) console.error(`stderr: ${stderr}`);
// if (stdout) console.log(`stdout: ${stdout}`);
if (error) {
console.log(`error: ${error.message}`);
if (error.message.includes('command not found')) {
console.info('Install git to check for updates!');
}
return reject(error);
}
resolve(stdout.trim());
});
});
// const git_main = () => readFileSync('.git/refs/heads/main').toString().trim();
let sha, date;
// if (existsSync('/.dockerenv')) { // did not work
if (process.env.NOVNC_PORT) {
log('Running inside Docker.');
['COMMIT', 'BRANCH', 'NOW'].forEach(v => log(` ${v}:`, process.env[v]));
sha = process.env.COMMIT;
date = process.env.NOW;
} else {
log('Not running inside Docker.');
sha = await execp('git rev-parse HEAD');
date = await execp('git show -s --format=%cD'); // same as format as `date -R` (RFC2822)
// date = await execp('git show -s --format=%ch'); // %ch is same as --date=human (short/relative)
}
const gh = await (await fetch('https://api.github.com/repos/vogler/free-games-claimer/commits/main', {
// headers: { accept: 'application/vnd.github.VERSION.sha' }
})).json();
// log(gh);
log('Local commit:', sha, new Date(date));
log('Online commit:', gh.sha, new Date(gh.commit.committer.date));
if (sha == gh.sha) {
log('Running the latest version!');
} else {
log('Not running the latest version!');
}

View File

@ -1,38 +0,0 @@
/* eslint-disable no-constant-condition */
import { delay, html_game_list, notify } from '../src/util.js';
import { cfg } from '../src/config.js';
const URL_CLAIM = 'https://gaming.amazon.com/home'; // dummy URL
console.debug('NOTIFY:', cfg.notify);
if (true) {
const notify_games = [
// { title: 'Kerbal Space Program', status: 'claimed', url: URL_CLAIM },
// { title: "Shadow Tactics - Aiko's Choice", status: 'claimed', url: URL_CLAIM },
{ title: 'Epistory - Typing Chronicles', status: 'claimed', url: URL_CLAIM },
];
await notify(`epic-games:<br>${html_game_list(notify_games)}`);
}
if (false) {
await delay(1000);
const notify_games = [
{ title: 'Faraway 2: Jungle Escape', status: 'claimed', url: URL_CLAIM },
{ title: 'Chicken Police - Paint it RED!', status: 'claimed', url: URL_CLAIM },
{ title: 'Lawn Mowing Simulator', status: 'claimed', url: URL_CLAIM },
{ title: 'Breathedge', status: 'claimed', url: URL_CLAIM },
{ title: 'The Evil Within 2', status: `<a href="${URL_CLAIM}">redeem</a> H97S6FB38FA6D09DEA on gog.com`, url: URL_CLAIM },
{ title: 'Beat Cop', status: `<a href="${URL_CLAIM}">redeem</a> BMKM8558EC55F7B38F on gog.com`, url: URL_CLAIM },
{ title: 'Dishonored 2', status: `<a href="${URL_CLAIM}">redeem</a> NNEK0987AB20DFBF8F on gog.com`, url: URL_CLAIM },
];
notify(`prime-gaming:<br>${html_game_list(notify_games)}`);
}
if (false) {
await delay(1000);
const notify_games = [
{ title: 'Haven Park', status: 'claimed', url: URL_CLAIM },
];
notify(`gog:<br>${html_game_list(notify_games)}`);
}

View File

@ -1,40 +0,0 @@
// https://github.com/enquirer/enquirer/issues/372
import { prompt } from '../src/util.js';
const handleSIGINT = () => process.on('SIGINT', () => { // e.g. when killed by Ctrl-C
console.log('\nInterrupted by SIGINT. Exit!');
process.exitCode = 130;
});
handleSIGINT();
function onRawSIGINT(fn) {
const { stdin, stdout } = process;
stdin.setRawMode(true);
stdin.resume();
stdin.on('data', data => {
const key = data.toString('utf-8');
if (key === '\u0003') { // ctrl + c
fn();
} else {
stdout.write(key);
}
});
}
onRawSIGINT(() => {
console.log('raw'); process.exit(1);
});
console.log('hello');
console.error('hello error');
try {
let i = 'foo';
i = await prompt(); // SIGINT no longer handled if this is executed
// handleSIGINT();
console.log('value:', i);
setTimeout(() => console.log('timeout 3s'), 3000);
} catch (e) {
process.exitCode ||= 1;
console.log('catch. exitCode:', process.exitCode);
console.error(e);
}
console.log('end. exitCode:', process.exitCode);

View File

@ -1,15 +0,0 @@
// https://github.com/enquirer/enquirer/issues/372
import Enquirer from 'enquirer';
const enquirer = new Enquirer();
let interrupted = false;
process.on('SIGINT', () => {
if (interrupted) process.exit();
interrupted = true;
console.log('SIGINT');
});
await enquirer.prompt({
type: 'input',
name: 'username',
message: 'What is your username?',
});

View File

@ -1,206 +0,0 @@
// TODO This is mostly a copy of epic-games.js
// New assets to claim every first Tuesday of a month.
import { firefox } from 'playwright-firefox'; // stealth plugin needs no outdated playwright-extra
import { authenticator } from 'otplib';
import path from 'path';
import { writeFileSync } from 'fs';
import { resolve, jsonDb, datetime, stealth, filenamify, prompt, notify, html_game_list, handleSIGINT } from './src/util.js';
import { cfg } from './src/config.js';
const screenshot = (...a) => resolve(cfg.dir.screenshots, 'unrealengine', ...a);
const URL_CLAIM = 'https://www.unrealengine.com/marketplace/en-US/assets?count=20&sortBy=effectiveDate&sortDir=DESC&start=0&tag=4910';
const URL_LOGIN = 'https://www.epicgames.com/id/login?lang=en-US&noHostRedirect=true&redirectUrl=' + URL_CLAIM;
console.log(datetime(), 'started checking unrealengine');
const db = await jsonDb('unrealengine.json', {});
// https://playwright.dev/docs/auth#multi-factor-authentication
const context = await firefox.launchPersistentContext(cfg.dir.browser, {
headless: cfg.headless,
viewport: { width: cfg.width, height: cfg.height },
userAgent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.83 Safari/537.36', // see replace of Headless in util.newStealthContext. TODO Windows UA enough to avoid 'device not supported'? update if browser is updated?
// userAgent for firefox: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:106.0) Gecko/20100101 Firefox/106.0
locale: 'en-US', // ignore OS locale to be sure to have english text for locators
recordVideo: cfg.record ? { dir: 'data/record/', size: { width: cfg.width, height: cfg.height } } : undefined, // will record a .webm video for each page navigated; without size, video would be scaled down to fit 800x800
recordHar: cfg.record ? { path: `data/record/ue-${filenamify(datetime())}.har` } : undefined, // will record a HAR file with network requests and responses; can be imported in Chrome devtools
handleSIGINT: false, // have to handle ourselves and call context.close(), otherwise recordings from above won't be saved
});
handleSIGINT(context);
await stealth(context);
if (!cfg.debug) context.setDefaultTimeout(cfg.timeout);
const page = context.pages().length ? context.pages()[0] : await context.newPage(); // should always exist
await page.setViewportSize({ width: cfg.width, height: cfg.height }); // TODO workaround for https://github.com/vogler/free-games-claimer/issues/277 until Playwright fixes it
// console.debug('userAgent:', await page.evaluate(() => navigator.userAgent));
const notify_games = [];
let user;
try {
await context.addCookies([{ name: 'OptanonAlertBoxClosed', value: new Date(Date.now() - 5 * 24 * 60 * 60 * 1000).toISOString(), domain: '.epicgames.com', path: '/' }]); // Accept cookies to get rid of banner to save space on screen. Set accept time to 5 days ago.
await page.goto(URL_CLAIM, { waitUntil: 'domcontentloaded' }); // 'domcontentloaded' faster than default 'load' https://playwright.dev/docs/api/class-page#page-goto
await page.waitForResponse(r => r.request().method() == 'POST' && r.url().startsWith('https://graphql.unrealengine.com/ue/graphql'));
while (await page.locator('unrealengine-navigation').getAttribute('isloggedin') != 'true') {
console.error('Not signed in anymore. Please login in the browser or here in the terminal.');
if (cfg.novnc_port) console.info(`Open http://localhost:${cfg.novnc_port} to login inside the docker container.`);
if (!cfg.debug) context.setDefaultTimeout(cfg.login_timeout); // give user some extra time to log in
console.info(`Login timeout is ${cfg.login_timeout / 1000} seconds!`);
await page.goto(URL_LOGIN, { waitUntil: 'domcontentloaded' });
if (cfg.eg_email && cfg.eg_password) console.info('Using email and password from environment.');
else console.info('Press ESC to skip the prompts if you want to login in the browser (not possible in headless mode).');
const email = cfg.eg_email || await prompt({ message: 'Enter email' });
const password = email && (cfg.eg_password || await prompt({ type: 'password', message: 'Enter password' }));
if (email && password) {
// await page.click('text=Sign in with Epic Games');
await page.fill('#email', email);
await page.click('button[type="submit"]');
await page.fill('#password', password);
await page.click('button[type="submit"]');
page.waitForSelector('#h_captcha_challenge_login_prod iframe').then(() => {
console.error('Got a captcha during login (likely due to too many attempts)! You may solve it in the browser, get a new IP or try again in a few hours.');
notify('unrealengine: got captcha during login. Please check.');
}).catch(_ => { });
// handle MFA, but don't await it
page.waitForURL('**/id/login/mfa**').then(async () => {
console.log('Enter the security code to continue - This appears to be a new device, browser or location. A security code has been sent to your email address at ...');
// TODO locator for text (email or app?)
const otp = cfg.eg_otpkey && authenticator.generate(cfg.eg_otpkey) || await prompt({ type: 'text', message: 'Enter two-factor sign in code', validate: n => n.toString().length == 6 || 'The code must be 6 digits!' }); // can't use type: 'number' since it strips away leading zeros and codes sometimes have them
await page.locator('input[name="code-input-0"]').pressSequentially(otp.toString());
await page.click('button[type="submit"]');
}).catch(_ => { });
} else {
console.log('Waiting for you to login in the browser.');
await notify('unrealengine: no longer signed in and not enough options set for automatic login.');
if (cfg.headless) {
console.log('Run `SHOW=1 node unrealengine` to login in the opened browser.');
await context.close(); // finishes potential recording
process.exit(1);
}
}
await page.waitForURL('**unrealengine.com/marketplace/**');
if (!cfg.debug) context.setDefaultTimeout(cfg.timeout);
}
await page.waitForTimeout(1000);
user = await page.locator('unrealengine-navigation').getAttribute('displayname'); // 'null' if !isloggedin
console.log(`Signed in as ${user}`);
db.data[user] ||= {};
page.locator('button:has-text("Accept All Cookies")').click().catch(_ => { });
const ids = [];
for (const p of await page.locator('article.asset').all()) {
const link = p.locator('h3 a');
const title = await link.innerText();
const url = 'https://www.unrealengine.com' + await link.getAttribute('href');
console.log([title, url]);
const id = url.split('/').pop();
db.data[user][id] ||= { title, time: datetime(), url, status: 'failed' }; // this will be set on the initial run only!
const notify_game = { title, url, status: 'failed' };
notify_games.push(notify_game); // status is updated below
// if (await p.locator('.btn .add-review-btn').count()) { // did not work
if ((await p.getAttribute('class')).includes('asset--owned')) {
console.log(' ↳ Already claimed');
if (db.data[user][id].status != 'claimed') {
db.data[user][id].status = 'existed';
notify_game.status = 'existed';
}
continue;
}
if (await p.locator('.btn .in-cart').count()) {
console.log(' ↳ Already in cart');
} else {
await p.locator('.btn .add').click();
console.log(' ↳ Added to cart');
}
ids.push(id);
}
if (!ids.length) {
console.log('Nothing to claim');
} else {
await page.waitForTimeout(2000);
const price = (await page.locator('.shopping-cart .total .price').innerText()).split(' ');
console.log('Price: ', price[1], 'instead of', price[0]);
if (price[1] != '0') {
const err = 'Price is not 0! Exit! Please <a href="https://github.com/vogler/free-games-claimer/issues/44">report</a>.';
console.error(err);
notify('unrealengine: ' + err);
process.exit(1);
}
// await page.pause();
console.log('Click shopping cart');
await page.locator('.shopping-cart').click();
// await page.waitForTimeout(2000);
await page.locator('button.checkout').click();
console.log('Click checkout');
// maybe: Accept End User License Agreement
page.locator('[name=accept-label]').check().then(() => {
console.log('Accept End User License Agreement');
page.locator('span:text-is("Accept")').click(); // otherwise matches 'Accept All Cookies'
}).catch(_ => { });
await page.waitForSelector('#webPurchaseContainer iframe'); // TODO needed?
const iframe = page.frameLocator('#webPurchaseContainer iframe');
if (cfg.debug) await page.pause();
if (cfg.dryrun) {
console.log('DRYRUN=1 -> Skip order!');
throw new Error('DRYRUN=1');
}
console.log('Click Place Order');
// Playwright clicked before button was ready to handle event, https://github.com/vogler/free-games-claimer/issues/84#issuecomment-1474346591
await iframe.locator('button:has-text("Place Order"):not(:has(.payment-loading--loading))').click({ delay: 11 });
// I Agree button is only shown for EU accounts! https://github.com/vogler/free-games-claimer/pull/7#issuecomment-1038964872
const btnAgree = iframe.locator('button:has-text("I Agree")');
btnAgree.waitFor().then(() => btnAgree.click()).catch(_ => { }); // EU: wait for and click 'I Agree'
try {
// context.setDefaultTimeout(100 * 1000); // give time to solve captcha, iframe goes blank after 60s?
const captcha = iframe.locator('#h_captcha_challenge_checkout_free_prod iframe');
captcha.waitFor().then(async () => { // don't await, since element may not be shown
// console.info(' Got hcaptcha challenge! NopeCHA extension will likely solve it.')
console.error(' Got hcaptcha challenge! Lost trust due to too many login attempts? You can solve the captcha in the browser or get a new IP address.');
}).catch(_ => { }); // may time out if not shown
await page.waitForSelector('text=Thank you');
for (const id of ids) {
db.data[user][id].status = 'claimed';
db.data[user][id].time = datetime(); // claimed time overwrites failed/dryrun time
}
notify_games.forEach(g => g.status == 'failed' && (g.status = 'claimed'));
console.log('Claimed successfully!');
// context.setDefaultTimeout(cfg.timeout);
} catch (e) {
console.log(e);
// console.error(' Failed to claim! Try again if NopeCHA timed out. Click the extension to see if you ran out of credits (refill after 24h). To avoid captchas try to get a new IP or set a cookie from https://www.hcaptcha.com/accessibility');
console.error(' Failed to claim! To avoid captchas try to get a new IP address.');
await page.screenshot({ path: screenshot('failed', `${filenamify(datetime())}.png`), fullPage: true });
// db.data[user][id].status = 'failed';
notify_games.forEach(g => g.status = 'failed');
}
// notify_game.status = db.data[user][game_id].status; // claimed or failed
if (notify_games.length) await page.screenshot({ path: screenshot(`${filenamify(datetime())}.png`), fullPage: false }); // fullPage is quite long...
console.log('Done');
}
} catch (error) {
process.exitCode ||= 1;
console.error('--- Exception:');
console.error(error); // .toString()?
if (error.message && process.exitCode != 130) notify(`unrealengine failed: ${error.message.split('\n')[0]}`);
} finally {
await db.write(); // write out json db
if (notify_games.filter(g => g.status != 'existed').length) { // don't notify if all were already claimed
notify(`unrealengine (${user}):<br>${html_game_list(notify_games)}`);
}
}
if (cfg.debug) writeFileSync(path.resolve(cfg.dir.browser, 'cookies.json'), JSON.stringify(await context.cookies()));
if (page.video()) console.log('Recorded video:', await page.video().path());
await context.close();

View File

@ -1,386 +0,0 @@
# Huly Self-Hosted
Please use this README if you want to deploy Huly on your server with `docker compose`. I'm using a Basic Droplet on Digital Ocean with Ubuntu 24.04, but these instructions can be easily adapted for any Linux distribution.
> [!NOTE]
> Huly is quite resource-heavy, so I recommend using a Droplet with 2 vCPUs and 4GB of RAM. Droplets with less RAM may
> stop responding or fail.
If you prefer Kubernetes deployment, there is a sample Kubernetes configuration under [kube](kube) directory.
## Installing `nginx` and `docker`
First, update repositories cache:
```bash
sudo apt update
```
Now, install `nginx`:
```bash
sudo apt install nginx
```
Install docker using the [recommended method](https://docs.docker.com/engine/install/ubuntu/) from docker website.
Afterwards, perform the [post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/). Pay particular attention to the third step (the `newgrp docker` command); it is needed for the setup script to run correctly.
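For reference, those post-installation steps boil down to the following (a minimal sketch of the standard Docker group setup; adjust the user if you are not running as your regular login user):
```bash
# Create the docker group (it usually already exists after installing Docker)
sudo groupadd docker
# Add your user to the docker group so docker can run without sudo
sudo usermod -aG docker $USER
# Apply the new group membership in the current shell (or log out and back in)
newgrp docker
# Verify that docker works without sudo
docker run hello-world
```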
## Clone the `huly-selfhost` repository and configure `nginx`
Next, let's clone the `huly-selfhost` repository and configure Huly.
```bash
git clone https://github.com/hcengineering/huly-selfhost.git
cd huly-selfhost
./setup.sh
```
This will generate a [huly.conf](./huly.conf) file with your chosen values and create your nginx config.
To add the generated configuration to your Nginx setup, run the following:
```bash
sudo ln -s $(pwd)/nginx.conf /etc/nginx/sites-enabled/huly.conf
```
> [!NOTE]
> If you change `HOST_ADDRESS`, `SECURE`, `HTTP_PORT` or `HTTP_BIND` be sure to update your [nginx.conf](./nginx.conf)
> by running:
>
> ```bash
> ./nginx.sh
> ```
>
> You can safely execute this script after adding your own customizations, such as SSL. It will only overwrite the
> necessary settings.
Finally, let's reload `nginx` and start Huly with `docker compose`.
```bash
sudo nginx -s reload
sudo docker compose up -d
```
Now, launch your web browser and enjoy Huly!
## Generating Public and Private VAPID keys for front-end
You'll need `Node.js` installed on your machine. To install `npm` on a Debian-based distro:
```
sudo apt-get install npm
```
Install web-push using npm
```bash
sudo npm install -g web-push
```
Generate VAPID Keys. Run the following command to generate a VAPID key pair:
```
web-push generate-vapid-keys
```
It will generate both keys, which look like this:
```bash
=======================================
Public Key:
sdfgsdgsdfgsdfggsdf
Private Key:
asdfsadfasdfsfd
=======================================
```
Keep these keys secure, as you will need them to set up your push notification service on the server.
Add these keys to `compose.yaml` under the `services:ses:environment` section:
```yaml
- PUSH_PUBLIC_KEY=your public key
- PUSH_PRIVATE_KEY=your private key
```
## Mail Service
The Mail Service is responsible for sending email notifications and confirmation emails during user login or signup processes. It can be configured to send emails through either an SMTP server or Amazon SES (Simple Email Service), but not both at the same time.
### General Configuration
1. Add the `mail` container to the `docker-compose.yaml` file. Specify the email address you will use to send emails as "SOURCE":
```yaml
mail:
image: hardcoreeng/mail:v0.6.501
container_name: mail
ports:
- 8097:8097
environment:
- PORT=8097
- SOURCE=<EMAIL_FROM>
restart: unless-stopped
```
2. Add the mail container URL to the `transactor` and `account` containers:
```yaml
account:
...
environment:
- MAIL_URL=http://mail:8097
...
transactor:
...
environment:
- MAIL_URL=http://mail:8097
...
```
3. In `Settings -> Notifications`, set up email notifications for the events you want to be notified about. Note that this is a user-specific setting, not company-wide; each user must set up their own notification preferences.
### SMTP Configuration
To integrate with an external SMTP server, update the `docker-compose.yaml` file with the following environment variables:
1. Add SMTP configuration to the environment section:
```yaml
mail:
...
environment:
...
- SMTP_HOST=<SMTP_SERVER_URL>
- SMTP_PORT=<SMTP_SERVER_PORT>
- SMTP_USERNAME=<SMTP_USER>
- SMTP_PASSWORD=<SMTP_PASSWORD>
```
2. Replace `<SMTP_SERVER_URL>` and `<SMTP_SERVER_PORT>` with your SMTP server's hostname and port. It's recommended to use a secure port, such as `587`.
3. Replace `<SMTP_USER>` and `<SMTP_PASSWORD>` with credentials for an account that can send emails via your SMTP server. If your service provider supports it, consider using an application API key as `<SMTP_USER>` and a token as `<SMTP_PASSWORD>` for enhanced security.
### Amazon SES Configuration
1. Set up Amazon Simple Email Service in AWS: [AWS SES Setup Guide](https://docs.aws.amazon.com/ses/latest/dg/setting-up.html)
2. Create a new IAM policy with the following permissions:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": ["ses:SendEmail", "ses:SendRawEmail"],
"Resource": "*"
}
]
}
```
3. Create a separate IAM user for SES API access, assigning the newly created policy to this user.
4. Configure SES environment variables in the `mail` container:
```yaml
mail:
...
environment:
...
- SES_ACCESS_KEY=<SES_ACCESS_KEY>
- SES_SECRET_KEY=<SES_SECRET_KEY>
- SES_REGION=<SES_REGION>
```
### Notes
1. SMTP and SES configurations cannot be used simultaneously.
2. `SES_URL` is not supported in versions v0.6.470 and later; use `MAIL_URL` instead.
## Love Service (Audio & Video calls)
Huly audio and video calls are built on top of LiveKit infrastructure. In order to use the Love service in your
self-hosted Huly, perform the following steps:
1. Set up a [LiveKit Cloud](https://cloud.livekit.io) account
2. Add `love` container to the docker-compose.yaml
```yaml
love:
image: hardcoreeng/love:v0.6.501
container_name: love
ports:
- 8096:8096
environment:
- STORAGE_CONFIG=minio|minio?accessKey=minioadmin&secretKey=minioadmin
- SECRET=secret
- ACCOUNTS_URL=http://account:3000
- DB_URL=mongodb://mongodb:27017
- MONGO_URL=mongodb://mongodb:27017
- STORAGE_PROVIDER_NAME=minio
- PORT=8096
- LIVEKIT_HOST=<LIVEKIT_HOST>
- LIVEKIT_API_KEY=<LIVEKIT_API_KEY>
- LIVEKIT_API_SECRET=<LIVEKIT_API_SECRET>
restart: unless-stopped
```
3. Configure `front` service:
```yaml
front:
...
environment:
- LIVEKIT_WS=<LIVEKIT_HOST>
- LOVE_ENDPOINT=http://love:8096
...
```
## AI Service
Huly provides an AI-powered chatbot that offers several services:
- chat with AI
- text message translations in the chat
- live translations for virtual office voice and video chats
1. Set up an OpenAI account
2. Add `aibot` container to the docker-compose.yaml
```yaml
aibot:
image: hardcoreeng/ai-bot:v0.6.501
ports:
- 4010:4010
environment:
- STORAGE_CONFIG=minio|minio?accessKey=minioadmin&secretKey=minioadmin
- SERVER_SECRET=secret
- ACCOUNTS_URL=http://account:3000
- DB_URL=mongodb://mongodb:27017
- MONGO_URL=mongodb://mongodb:27017
- STATS_URL=http://stats:4900
- FIRST_NAME=Bot
- LAST_NAME=Huly AI
- PASSWORD=<PASSWORD>
- OPENAI_API_KEY=<OPENAI_API_KEY>
- OPENAI_BASE_URL=<OPENAI_BASE_URL>
# optional if you use love service
- LOVE_ENDPOINT=http://love:8096
restart: unless-stopped
```
3. Configure `front` service:
```yaml
front:
...
environment:
# this should be available outside of the cluster
- AI_URL=http://aibot:4010
...
```
4. Configure `transactor` service:
```yaml
transactor:
...
environment:
# this should be available inside of the cluster
- AI_BOT_URL=http://aibot:4010
...
```
## Configure OpenID Connect (OIDC)
You can configure a Huly instance to authorize users (sign-in/sign-up) using an OpenID Connect identity provider (IdP).
### On the IdP side
1. Create a new OpenID application.
- Use `{huly_account_svc}/auth/openid/callback` as the sign-in redirect URI. The `huly_account_svc` is the hostname for the account service of the deployment, which should be accessible externally from the client/browser side. In the provided example setup, the account service runs on port 3000.
**URI Example:**
- `http://huly.mydomain.com:3000/auth/openid/callback`
2. Configure user access to the application as needed.
### On the Huly side
For the account service, set the following environment variables as provided by the IdP:
- OPENID_CLIENT_ID
- OPENID_CLIENT_SECRET
- OPENID_ISSUER
Ensure the following environment variable is configured for the front service (add it if it is missing):
- ACCOUNTS_URL (This should contain the URL of the account service, accessible from the client side.)
You will need to expose your account service port (e.g. 3000) in your nginx.conf.
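For example, with the `compose.yaml` layout used elsewhere in this README, the additions could look like the following (a minimal sketch with placeholder values; replace them with the values issued by your IdP and your own domain):
```yaml
account:
  # ...
  environment:
    - OPENID_CLIENT_ID=<client id issued by your IdP>
    - OPENID_CLIENT_SECRET=<client secret issued by your IdP>
    - OPENID_ISSUER=<issuer URL of your IdP>
  # ...
front:
  # ...
  environment:
    # must be reachable from the browser, see the URI example above
    - ACCOUNTS_URL=http://huly.mydomain.com:3000
  # ...
```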
Note: Once all the required environment variables are configured, you will see an additional button on the
sign-in/sign-up pages.
## Configure GitHub OAuth
You can also configure a Huly instance to use GitHub OAuth for user authorization (sign-in/sign-up).
### On the GitHub side
1. Create a new GitHub OAuth application.
- Use `{huly_account_svc}/auth/github/callback` as the sign-in redirect URI. The `huly_account_svc` is the hostname for the account service of the deployment, which should be accessible externally from the client/browser side. In the provided example setup, the account service runs on port 3000.
**URI Example:**
- `http://huly.mydomain.com:3000/auth/github/callback`
### On the Huly side
Specify the following environment variables for the account service:
- `GITHUB_CLIENT_ID`
- `GITHUB_CLIENT_SECRET`
Ensure the following environment variable is configured for the front service (add it if it is missing):
- `ACCOUNTS_URL` (The URL of the account service, accessible from the client side.)
You will need to expose your account service port (e.g. 3000) in your nginx.conf.
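As with OIDC, a minimal sketch of the corresponding `compose.yaml` additions (placeholder values, following the layout used elsewhere in this README):
```yaml
account:
  # ...
  environment:
    - GITHUB_CLIENT_ID=<client id of your GitHub OAuth app>
    - GITHUB_CLIENT_SECRET=<client secret of your GitHub OAuth app>
  # ...
front:
  # ...
  environment:
    # must be reachable from the browser
    - ACCOUNTS_URL=http://huly.mydomain.com:3000
  # ...
```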
Notes:
- The `ISSUER` environment variable is not required for GitHub OAuth.
- Once all the required environment variables are configured, you will see an additional button on the sign-in/sign-up
pages.
## Disable Sign-Up
You can disable public sign-ups for a deployment. When configured, sign-ups will only be permitted through an invite
link to a specific workspace.
To implement this, set the following environment variable for both the front and account services:
```yaml
account:
# ...
environment:
- DISABLE_SIGNUP=true
# ...
front:
# ...
environment:
- DISABLE_SIGNUP=true
# ...
```
_Note: When setting up a new deployment, either create the initial account before disabling sign-ups or use the
development tool to create the first account._

View File

@ -1,602 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2025 community-scripts ORG
# Author: GitHub Copilot
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/hcengineering/huly-selfhost
# Import Functions and Setup
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
# Installing Dependencies
msg_info "Installing Dependencies"
$STD apt-get update
$STD apt-get install -y curl git ca-certificates gnupg nginx lsb-release python3 python3-pip python3-venv
msg_ok "Installed Dependencies"
# Installing Python and uv package manager
msg_info "Setting up Python environment and uv"
setup_uv
msg_ok "Python environment and uv ready"
msg_info "Installing MongoDB"
# Install MongoDB natively
mkdir -p /etc/apt/keyrings
# Do not silence the producers of these pipelines; $STD would discard the data gpg/tee need
curl -fsSL https://www.mongodb.org/static/pgp/server-7.0.asc | gpg --dearmor -o /etc/apt/keyrings/mongodb-server-7.0.gpg
echo "deb [ arch=amd64,arm64 signed-by=/etc/apt/keyrings/mongodb-server-7.0.gpg ] https://repo.mongodb.org/apt/debian $(lsb_release -cs)/mongodb-org/7.0 main" | tee /etc/apt/sources.list.d/mongodb-org-7.0.list >/dev/null
$STD apt-get update
$STD apt-get install -y mongodb-org
$STD systemctl enable --now mongod
msg_ok "Installed MongoDB"
msg_info "Installing Node.js and web-push"
# Install Node.js for running Huly services
NODE_VERSION=20 install_node_and_modules # This function is from tools.func
$STD npm install -g web-push
msg_ok "Installed Node.js and web-push"
# Use temporary Docker for extracting components
msg_info "Installing Docker temporarily (for Huly component extraction)"
$STD mkdir -p /etc/apt/keyrings
# As above, keep the pipeline producers unsilenced so gpg/tee receive the data
curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list >/dev/null
$STD apt-get update
$STD apt-get install -y docker-ce docker-ce-cli containerd.io
$STD systemctl start docker
msg_ok "Installed Docker temporarily"
msg_info "Setting up Huly environment and configuration"
# Create necessary directories
$STD mkdir -p /opt/huly-selfhost
$STD mkdir -p /opt/huly/{front,account,transactor,collaborator,rekoni}
# Get server IP
SERVER_IP=$(hostname -I | awk '{print $1}')
# Generate VAPID keys for push notifications
VAPID_OUTPUT=$(web-push generate-vapid-keys) # capture the output directly; $STD would discard it
VAPID_PUBLIC_KEY=$(echo "$VAPID_OUTPUT" | grep "Public Key:" | cut -d ":" -f2 | tr -d ' ')
VAPID_PRIVATE_KEY=$(echo "$VAPID_OUTPUT" | grep "Private Key:" | cut -d ":" -f2 | tr -d ' ')
# Create MongoDB database and user for Huly
# Ensure mongosh is available or use mongo if older version
MONGO_EXEC=$(command -v mongosh || command -v mongo)
if [ -z "$MONGO_EXEC" ]; then
msg_error "Neither mongosh nor mongo command found. Cannot create MongoDB user for Huly."
exit 1
fi
# 'use <db>' is a mongo shell helper, not plain JavaScript, so select the database via getSiblingDB() in --eval
$STD $MONGO_EXEC --eval "
db.getSiblingDB('huly').createUser({
user: 'huly',
pwd: 'hulypassword123', // Consider making this configurable or randomly generated
roles: [{role: 'readWrite', db: 'huly'}]
});
"
msg_ok "Created MongoDB user and database for Huly"
# Save configuration and VAPID keys
APP_SECRET_KEY=$(openssl rand -hex 32) # Generate a random secret key
cat <<EOF >/opt/huly-selfhost/native.conf
# Huly Native Configuration
SERVER_IP=$SERVER_IP
MONGO_URL=mongodb://huly:hulypassword123@localhost:27017/huly
MINIO_ENDPOINT=localhost:9000
# Consider making the MinIO credentials configurable or randomly generated.
# Keep comments on their own lines: this file is also consumed as a systemd EnvironmentFile.
MINIO_ACCESS_KEY=minioadmin
MINIO_SECRET_KEY=minioadmin
APP_SECRET_KEY=$APP_SECRET_KEY
# VAPID Keys for Push Notifications
VAPID_PUBLIC_KEY=$VAPID_PUBLIC_KEY
VAPID_PRIVATE_KEY=$VAPID_PRIVATE_KEY
# Service Ports (ensure these are unique and not conflicting)
FRONT_PORT=3000
ACCOUNT_PORT=3001
TRANSACTOR_PORT=3002
COLLABORATOR_PORT=3078
REKONI_PORT=4004
EOF
cat <<EOF >~/huly.creds
Huly Credentials (save this file securely!)
MongoDB URL: mongodb://huly:hulypassword123@localhost:27017/huly
MinIO Access: minioadmin / minioadmin (Console: http://$SERVER_IP:9001)
App Secret Key: $APP_SECRET_KEY
VAPID Public Key: $VAPID_PUBLIC_KEY
VAPID Private Key: $VAPID_PRIVATE_KEY
EOF
chmod 600 ~/huly.creds
msg_ok "Set up Huly environment and configuration in /opt/huly-selfhost/native.conf and ~/huly.creds"
msg_info "Extracting Huly applications from Docker images"
# Function to extract from container with fallback paths
extract_from_container() {
local component_name="$1"
local image_name="$2"
local target_dir="$3"
local container_name="huly-${component_name}-extract"
msg_info "Pulling Docker image for $component_name: $image_name"
if ! $STD docker pull "$image_name"; then
msg_error "Failed to pull Docker image $image_name for $component_name."
return 1
fi
msg_info "Extracting $component_name from $image_name to $target_dir"
if ! $STD docker create --name "$container_name" "$image_name"; then
msg_error "Failed to create Docker container for $component_name."
return 1
fi
# Try common paths where applications might be located
local extracted=false
# Order matters: specific paths first, then broader ones
for path_in_container in "/app/dist" "/app" "/usr/src/app/dist" "/usr/src/app" "/dist" "/opt/app" "/home/app"; do
# The container is only created, not running, so 'docker exec' cannot be used to probe paths.
# 'docker cp' works on stopped containers and fails if the path does not exist, so try it directly.
if $STD docker cp "${container_name}:${path_in_container}/." "$target_dir/" 2>/dev/null; then
extracted=true
msg_ok "Extracted $component_name from ${path_in_container}"
break
fi
done
if ! $extracted; then
msg_warn "Could not find standard app directory in $component_name container ($image_name). Attempting to copy entire root '/'. This might be slow and include unnecessary files."
if $STD docker cp "${container_name}:/." "$target_dir/"; then
msg_ok "Copied entire root for $component_name. Review $target_dir for correctness."
else
msg_error "Failed to copy any files for $component_name from $image_name."
$STD docker rm "$container_name"
return 1
fi
fi
$STD docker rm "$container_name"
msg_ok "Extraction process completed for $component_name"
}
# Extract Huly Components
extract_from_container "front" "hardcoreeng/front:latest" "/opt/huly/front"
extract_from_container "account" "hardcoreeng/account:latest" "/opt/huly/account"
extract_from_container "transactor" "hardcoreeng/transactor:latest" "/opt/huly/transactor"
extract_from_container "collaborator" "hardcoreeng/collaborator:latest" "/opt/huly/collaborator"
extract_from_container "rekoni" "hardcoreeng/rekoni:latest" "/opt/huly/rekoni"
msg_ok "Extracted all Huly applications"
msg_info "Installing MinIO for object storage"
# Fetch latest MinIO version dynamically if possible, or use a known good one
# For stability, using a fixed version. Update as needed.
MINIO_VERSION="RELEASE.2024-06-13T22-53-53Z"
$STD curl -fsSL "https://dl.min.io/server/minio/release/linux-amd64/archive/minio.${MINIO_VERSION}" -o /usr/local/bin/minio
$STD chmod +x /usr/local/bin/minio
useradd -r -s /bin/false minio 2>/dev/null || true # Create user if not exists, suppress error if it does
mkdir -p /opt/minio/data /etc/minio
chown -R minio:minio /opt/minio /etc/minio
# Source native.conf for MinIO keys to write into minio.conf
# This ensures consistency if keys were changed in native.conf
source /opt/huly-selfhost/native.conf
cat <<EOF >/etc/minio/minio.conf
MINIO_ROOT_USER=${MINIO_ACCESS_KEY}
MINIO_ROOT_PASSWORD=${MINIO_SECRET_KEY}
MINIO_VOLUMES="/opt/minio/data"
MINIO_OPTS="--console-address :9001"
EOF
msg_ok "Installed and configured MinIO"
msg_info "Setting up Rekoni Python environment"
REKONI_PYTHON_EXEC="/usr/bin/python3" # Default system Python
# Define Rekoni venv path
REKONI_VENV_PATH="/opt/huly-venv/rekoni"
$STD mkdir -p "$(dirname "$REKONI_VENV_PATH")"
if [ -f /opt/huly/rekoni/requirements.txt ]; then
msg_info "Found requirements.txt for Rekoni. Creating virtual environment at $REKONI_VENV_PATH."
if $STD /usr/local/bin/uv venv "$REKONI_VENV_PATH"; then
REKONI_PYTHON_EXEC="$REKONI_VENV_PATH/bin/python3"
msg_info "Installing Rekoni dependencies from requirements.txt using uv into venv."
# uv does not install a copy of itself inside the venv; point the system-wide uv at the venv's interpreter
if $STD /usr/local/bin/uv pip install --python "$REKONI_VENV_PATH/bin/python3" -r /opt/huly/rekoni/requirements.txt; then
msg_ok "Rekoni dependencies installed into virtual environment."
else
msg_error "Failed to install Rekoni dependencies from requirements.txt using uv. Rekoni might not work."
REKONI_PYTHON_EXEC="/usr/bin/python3" # Fallback to system Python
fi
else
msg_warn "Failed to create virtual environment for Rekoni with uv. Will attempt system-wide Python package installation for Rekoni."
# Attempt to install requirements.txt system-wide as a fallback
msg_info "Attempting to install Rekoni dependencies from requirements.txt system-wide using uv."
if $STD /usr/local/bin/uv pip install --system -r /opt/huly/rekoni/requirements.txt; then
msg_ok "Installed Rekoni dependencies system-wide from requirements.txt."
else
msg_error "Failed to install Rekoni dependencies system-wide from requirements.txt. Rekoni might not work."
fi
fi
else
msg_warn "requirements.txt not found for Rekoni in /opt/huly/rekoni/. Attempting to install a common set of Python packages system-wide."
# List of common dependencies, keep this updated based on Rekoni's needs
COMMON_REKONI_DEPS="fastapi uvicorn python-multipart Pillow Wand pdf2image pytesseract nltk spacy Faker requests beautifulsoup4 readability-lxml sentence-transformers pymongo minio pydantic tiktoken openai InstructorEmbedding PyMuPDF"
msg_info "Installing common Python packages for Rekoni system-wide using uv: $COMMON_REKONI_DEPS"
if $STD /usr/local/bin/uv pip install -v --system $COMMON_REKONI_DEPS; then
msg_ok "Installed common Python packages system-wide for Rekoni."
else
msg_error "Failed to install common Python packages system-wide for Rekoni. Rekoni might not work."
fi
fi
msg_info "Downloading NLTK and Spacy models for Rekoni (using $REKONI_PYTHON_EXEC)"
NLTK_MODELS="punkt stopwords wordnet omw-1.4" # Open Multilingual Wordnet
SPACY_MODEL="en_core_web_sm" # Small English model
# Ensure NLTK_DATA directory exists and is writable if needed, or use a user-writable path
# For system-wide, /usr/share/nltk_data is common. For venv, it might be within the venv.
# The -d flag for nltk.downloader specifies the download directory.
NLTK_DATA_DIR="/usr/share/nltk_data"
$STD mkdir -p "$NLTK_DATA_DIR"
# Grant write permission temporarily if needed, or run as root.
# For simplicity in script, assuming root or sudo for system-wide.
if ! $STD $REKONI_PYTHON_EXEC -m nltk.downloader -d "$NLTK_DATA_DIR" $NLTK_MODELS; then
msg_warn "NLTK model download failed for some components ($NLTK_MODELS) using $REKONI_PYTHON_EXEC. This might affect Rekoni functionality."
fi
if ! $STD $REKONI_PYTHON_EXEC -m spacy download $SPACY_MODEL; then
msg_warn "Spacy model $SPACY_MODEL download failed using $REKONI_PYTHON_EXEC. This might affect Rekoni functionality."
fi
msg_ok "Python setup for Rekoni completed. Rekoni will run using: $REKONI_PYTHON_EXEC"
msg_info "Removing Docker"
$STD systemctl stop docker
$STD apt-get remove -y docker-ce docker-ce-cli containerd.io
$STD apt-get purge -y docker-ce docker-ce-cli containerd.io --allow-remove-essential
$STD rm -rf /var/lib/docker /var/lib/containerd
$STD rm -f /etc/apt/sources.list.d/docker.list /etc/apt/keyrings/docker.gpg
msg_ok "Removed Docker"
msg_info "Configuring Nginx as reverse proxy"
$STD rm -f /etc/nginx/sites-enabled/default
cat <<EOF >/etc/nginx/sites-available/huly.conf
server {
listen 80 default_server;
listen [::]:80 default_server;
server_name _; # Replace with your domain if you have one
client_max_body_size 100M; # For file uploads via transactor
# Nginx cannot read variables from native.conf, so the upstream ports below are hardcoded
# to match the defaults in native.conf. If you change a port there, update it here as well
# (or regenerate this config from a template).
location / {
proxy_pass http://127.0.0.1:3000; # Huly Frontend (FRONT_PORT)
proxy_http_version 1.1;
proxy_set_header Upgrade \$http_upgrade;
proxy_set_header Connection "upgrade"; # For WebSockets if front uses them directly
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
proxy_read_timeout 300s; # Increase timeout for potentially long operations
proxy_send_timeout 300s;
}
location /account/ {
proxy_pass http://127.0.0.1:3001/; # Huly Account (ACCOUNT_PORT)
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
}
# Transactor handles general API and WebSocket for collaboration
location /transactor/ {
proxy_pass http://127.0.0.1:3002/; # Huly Transactor (TRANSACTOR_PORT)
proxy_http_version 1.1;
proxy_set_header Upgrade \$http_upgrade; # Essential for WebSocket
proxy_set_header Connection "upgrade"; # Essential for WebSocket
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
}
# Specific path for uploads if front uses /upload directly to transactor
# Ensure this matches how the frontend makes upload requests.
# If uploads go via /transactor/upload, this specific block might not be needed.
location /upload {
proxy_pass http://127.0.0.1:3002/upload; # Huly Transactor Upload (TRANSACTOR_PORT)
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
client_max_body_size 100M; # Ensure this is set for uploads
}
location /collaborator/ {
proxy_pass http://127.0.0.1:3078/; # Huly Collaborator (COLLABORATOR_PORT)
proxy_http_version 1.1;
proxy_set_header Upgrade \$http_upgrade; # Essential for WebSocket
proxy_set_header Connection "upgrade"; # Essential for WebSocket
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
}
location /rekoni/ {
proxy_pass http://127.0.0.1:4004/; # Huly Rekoni (REKONI_PORT)
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
}
# MinIO Console access (if needed through reverse proxy, otherwise direct access on :9001)
# location /minio-console/ {
# proxy_pass http://127.0.0.1:9001/;
# proxy_set_header Host \$host;
# }
# MinIO API/S3 access (if needed through reverse proxy)
# Usually, services access MinIO directly on localhost:9000
# location /s3/ { # Example path, adjust as needed
# proxy_pass http://127.0.0.1:9000/;
# proxy_set_header Host \$host; # Important for MinIO to work correctly
# }
}
EOF
$STD ln -sf /etc/nginx/sites-available/huly.conf /etc/nginx/sites-enabled/huly.conf
if nginx -t >/dev/null 2>&1; then
$STD systemctl reload nginx
msg_ok "Nginx configured and reloaded."
else
msg_error "Nginx configuration test failed. Please check /etc/nginx/sites-available/huly.conf and nginx logs (journalctl -u nginx)."
# Optionally, print nginx -t output for debugging
nginx -t
fi
# Source the config file to get variables for systemd units
# This ensures systemd units use the values from native.conf
if [ -f /opt/huly-selfhost/native.conf ]; then
source /opt/huly-selfhost/native.conf
else
msg_error "Huly configuration file /opt/huly-selfhost/native.conf not found. Cannot create systemd services correctly."
exit 1
fi
msg_info "Creating systemd services for Huly components and MinIO"
# MinIO service
cat <<EOF >/etc/systemd/system/minio.service
[Unit]
Description=MinIO Object Storage
Documentation=https://docs.min.io
Wants=network-online.target
After=network-online.target
AssertFileIsExecutable=/usr/local/bin/minio
[Service]
WorkingDirectory=/opt/minio
User=minio
Group=minio
# /etc/minio/minio.conf contains MINIO_ROOT_USER, MINIO_ROOT_PASSWORD, MINIO_VOLUMES and MINIO_OPTS.
# systemd does not support trailing comments, so keep comments on their own lines.
EnvironmentFile=/etc/minio/minio.conf
ExecStartPre=/bin/bash -c "if [ -z \\"\${MINIO_VOLUMES}\\" ]; then echo 'Variable MINIO_VOLUMES not set in /etc/minio/minio.conf'; exit 1; fi"
ExecStart=/usr/local/bin/minio server \$MINIO_OPTS \$MINIO_VOLUMES
Restart=always
LimitNOFILE=1048576
TasksMax=infinity
# Or set a reasonable timeout instead of infinity
TimeoutStopSec=infinity
SendSIGKILL=no
[Install]
WantedBy=multi-user.target
EOF
# Huly User for services (optional, but good practice)
# useradd -r -s /bin/false hulyuser 2>/dev/null || true
# Frontend service
# Check if the entrypoint is bundle.js or server.js or main.js - adjust ExecStart
# Common for Node.js apps: dist/bundle.js, dist/main.js, server.js, app.js
# Assuming /opt/huly/front/dist/bundle.js from previous observations
FRONT_EXEC_PATH="/opt/huly/front/dist/bundle.js" # Default, adjust if different
if [ ! -f "$FRONT_EXEC_PATH" ] && [ -f "/opt/huly/front/bundle.js" ]; then FRONT_EXEC_PATH="/opt/huly/front/bundle.js"; fi
if [ ! -f "$FRONT_EXEC_PATH" ] && [ -f "/opt/huly/front/main.js" ]; then FRONT_EXEC_PATH="/opt/huly/front/main.js"; fi
if [ ! -f "$FRONT_EXEC_PATH" ] && [ -f "/opt/huly/front/server.js" ]; then FRONT_EXEC_PATH="/opt/huly/front/server.js"; fi
# Add more checks if necessary, or make it configurable
cat <<EOF >/etc/systemd/system/huly-front.service
[Unit]
Description=Huly Frontend Service
After=network.target nginx.service huly-account.service huly-transactor.service huly-rekoni.service huly-collaborator.service
Wants=nginx.service huly-account.service huly-transactor.service huly-rekoni.service huly-collaborator.service
[Service]
Type=simple
WorkingDirectory=/opt/huly/front
ExecStart=/usr/bin/node $FRONT_EXEC_PATH
Restart=always
# Consider a dedicated 'hulyuser' or 'www-data' if created and permissions allow
User=root
# native.conf supplies the shared settings (MongoDB, MinIO, VAPID keys)
EnvironmentFile=/opt/huly-selfhost/native.conf
# systemd does not expand shell-style defaults, so the ports below are hardcoded to match native.conf
Environment="PORT=3000"
Environment="NODE_ENV=production"
# URLs assume all services run on this host; adjust if they are split across hosts
Environment="ACCOUNTS_URL=http://localhost:3001"
# Ensure the upload path matches how the frontend performs uploads
Environment="UPLOAD_URL=http://localhost:3002/upload"
Environment="REKONI_URL=http://localhost:4004"
# Note: ws:// for the collaborator WebSocket endpoint
Environment="COLLABORATOR_URL=ws://localhost:3078"
StandardOutput=journal
StandardError=journal
[Install]
WantedBy=multi-user.target
EOF
# Account service
ACCOUNT_EXEC_PATH="/opt/huly/account/bundle.js" # Default
if [ ! -f "$ACCOUNT_EXEC_PATH" ] && [ -f "/opt/huly/account/main.js" ]; then ACCOUNT_EXEC_PATH="/opt/huly/account/main.js"; fi
# ... add other checks for account executable
cat <<EOF >/etc/systemd/system/huly-account.service
[Unit]
Description=Huly Account Service
After=network.target mongod.service
Requires=mongod.service
[Service]
Type=simple
WorkingDirectory=/opt/huly/account
ExecStart=/usr/bin/node $ACCOUNT_EXEC_PATH
Restart=always
# Consider a dedicated 'hulyuser'
User=root
EnvironmentFile=/opt/huly-selfhost/native.conf
# systemd does not expand shell-style defaults, so the port is hardcoded to match native.conf
Environment="PORT=3001"
# Use ws:// (or wss:// behind TLS) for the transactor endpoint
Environment="TRANSACTOR_URL=ws://localhost:3002"
StandardOutput=journal
StandardError=journal
[Install]
WantedBy=multi-user.target
EOF
# Transactor service
TRANSACTOR_EXEC_PATH="/opt/huly/transactor/bundle.js" # Default
if [ ! -f "$TRANSACTOR_EXEC_PATH" ] && [ -f "/opt/huly/transactor/main.js" ]; then TRANSACTOR_EXEC_PATH="/opt/huly/transactor/main.js"; fi
# ... add other checks for transactor executable
cat <<EOF >/etc/systemd/system/huly-transactor.service
[Unit]
Description=Huly Transactor Service
After=network.target mongod.service minio.service huly-account.service
Requires=mongod.service minio.service huly-account.service
[Service]
Type=simple
WorkingDirectory=/opt/huly/transactor
ExecStart=/usr/bin/node $TRANSACTOR_EXEC_PATH
Restart=always
# Consider a dedicated 'hulyuser'
User=root
EnvironmentFile=/opt/huly-selfhost/native.conf
# systemd does not expand shell-style defaults, so the port is hardcoded to match native.conf
Environment="PORT=3002"
# STORAGE_CONFIG is filled in from native.conf values when this unit file is written
# (secure=false assumes MinIO is served over plain http)
Environment="STORAGE_CONFIG=minio|minio?accessKey=${MINIO_ACCESS_KEY}&secretKey=${MINIO_SECRET_KEY}&endpoint=${MINIO_ENDPOINT}&secure=false"
Environment="ACCOUNTS_URL=http://localhost:3001"
Environment="REKONI_URL=http://localhost:4004"
StandardOutput=journal
StandardError=journal
[Install]
WantedBy=multi-user.target
EOF
# Collaborator service
COLLABORATOR_EXEC_PATH="/opt/huly/collaborator/dist/bundle.js" # Default
if [ ! -f "$COLLABORATOR_EXEC_PATH" ] && [ -f "/opt/huly/collaborator/bundle.js" ]; then COLLABORATOR_EXEC_PATH="/opt/huly/collaborator/bundle.js"; fi
# ... add other checks for collaborator executable
cat <<EOF >/etc/systemd/system/huly-collaborator.service
[Unit]
Description=Huly Collaborator Service
After=network.target mongod.service
Requires=mongod.service
[Service]
Type=simple
WorkingDirectory=/opt/huly/collaborator
ExecStart=/usr/bin/node $COLLABORATOR_EXEC_PATH
Restart=always
# Consider a dedicated 'hulyuser'
User=root
EnvironmentFile=/opt/huly-selfhost/native.conf
# systemd does not expand shell-style defaults, so the port is hardcoded to match native.conf
Environment="PORT=3078"
StandardOutput=journal
StandardError=journal
[Install]
WantedBy=multi-user.target
EOF
# Rekoni service
# Rekoni is Python, ExecStart uses the determined $REKONI_PYTHON_EXEC
# Ensure main.py (or equivalent) is the entrypoint in /opt/huly/rekoni/
REKONI_ENTRYPOINT="main.py" # Default, ensure this file exists in /opt/huly/rekoni
if [ ! -f "/opt/huly/rekoni/$REKONI_ENTRYPOINT" ]; then
msg_warn "Rekoni entrypoint $REKONI_ENTRYPOINT not found in /opt/huly/rekoni/. Service might fail."
# Potentially look for app.py or other common Python entrypoints if main.py is missing
fi
cat <<EOF >/etc/systemd/system/huly-rekoni.service
[Unit]
Description=Huly Rekoni Service (File Processing)
After=network.target mongod.service minio.service
Requires=mongod.service minio.service
[Service]
Type=simple
WorkingDirectory=/opt/huly/rekoni
ExecStart=$REKONI_PYTHON_EXEC $REKONI_ENTRYPOINT
Restart=always
# Consider a dedicated 'hulyuser'
User=root
EnvironmentFile=/opt/huly-selfhost/native.conf
# systemd does not expand shell-style defaults, so the port is hardcoded to match native.conf
Environment="PORT=4004"
# NLTK data path baked in from the install-time value of NLTK_DATA_DIR
Environment="NLTK_DATA=${NLTK_DATA_DIR}"
# Add other Rekoni specific ENV VARS if any, e.g. OPENAI_API_KEY if used and not in native.conf
StandardOutput=journal
StandardError=journal
[Install]
WantedBy=multi-user.target
EOF
$STD systemctl daemon-reload
# Enable services (start them on boot)
$STD systemctl enable minio huly-front huly-account huly-transactor huly-collaborator huly-rekoni mongod nginx
# Start services now
$STD systemctl start minio mongod # Start dependencies first
# Brief pause to allow DB/MinIO to initialize before dependent services start
sleep 5
$STD systemctl start nginx huly-account # Account needs Mongo
sleep 2
$STD systemctl start huly-transactor huly-rekoni # These need Account, Mongo, Minio
sleep 2
$STD systemctl start huly-collaborator # Needs Mongo
sleep 2
$STD systemctl start huly-front # Front needs all backends
msg_ok "Created, enabled, and started Huly services and dependencies"
# Create version file to track installations and updates
echo "$(date '+%Y-%m-%d %H:%M:%S') - Initial Install of Huly Native" >/opt/huly-selfhost/version.txt
motd_ssh
customize
# Cleanup
msg_info "Cleaning up apt cache and unused packages"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned up"
echo -e "${INFO}${YW} Huly native installation completed successfully!${CL}"
echo -e "${INFO}${YW} Access URL: http://$SERVER_IP (or your configured domain)${CL}"
echo -e "${INFO}${YW} Services may take a few moments to fully initialize.${CL}"
echo -e ""
echo -e "${INFO}${YW} Service Status Commands:${CL}"
echo -e "${INFO}${YW} • Check all Huly services: systemctl status 'huly-*' minio mongod nginx ${CL}"
echo -e "${INFO}${YW} • Example: systemctl status huly-front ${CL}"
echo -e "${INFO}${YW} • View logs: journalctl -u <service_name> -f (e.g., journalctl -u huly-front -f)${CL}"
echo -e "${INFO}${YW} • MinIO Console: http://$SERVER_IP:9001 (User: ${MINIO_ACCESS_KEY}, Pass: ${MINIO_SECRET_KEY})${CL}"
echo -e ""
echo -e "${INFO}${YW} IMPORTANT: Review credentials in ~/huly.creds and store them securely.${CL}"
echo -e "${INFO}${YW} Configuration is in: /opt/huly-selfhost/native.conf${CL}"
echo -e "${INFO}${YW} Rekoni Python executable: $REKONI_PYTHON_EXEC ${CL}"

View File

@ -0,0 +1,96 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: GitHub Copilot
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/Dictionarry-Hub/profilarr
# App Default Values
APP="Profilarr"
var_tags="arr;automation"
var_cpu="2"
var_ram="2048"
var_disk="8"
var_os="debian"
var_version="12"
var_unprivileged="1"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
# Check if installation is present
if [[ ! -d /opt/${APP}/backend ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
# Crawling the new version and checking whether an update is required
RELEASE=$(curl -fsSL https://api.github.com/repos/Dictionarry-Hub/profilarr/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
# Stopping Services
msg_info "Stopping $APP"
systemctl stop profilarr
msg_ok "Stopped $APP"
# Creating Backup
msg_info "Creating Backup"
tar -czf "/opt/${APP}_backup_$(date +%F).tar.gz" /opt/${APP} /opt/${APP}_config
msg_ok "Backup Created"
# Execute Update
msg_info "Updating $APP to v${RELEASE}"
temp_file=$(mktemp)
curl -fsSL -o "$temp_file" "https://github.com/Dictionarry-Hub/profilarr/archive/refs/tags/v${RELEASE}.zip"
cd /tmp
unzip -q "$temp_file"
rm -rf /opt/${APP}/backend /opt/${APP}/frontend
mv "profilarr-${RELEASE}/backend" /opt/${APP}/
mv "profilarr-${RELEASE}/frontend" /opt/${APP}/
# Update Python dependencies
cd /opt/${APP}/backend
/opt/${APP}/venv/bin/pip install -r requirements.txt
# Build frontend
cd /opt/${APP}/frontend
npm install
npm run build
cp -r dist/* /opt/${APP}/backend/app/static/
msg_ok "Updated $APP to v${RELEASE}"
# Starting Services
msg_info "Starting $APP"
systemctl start profilarr
msg_ok "Started $APP"
# Cleaning up
msg_info "Cleaning Up"
rm -f "$temp_file"
rm -rf "/tmp/profilarr-${RELEASE}"
msg_ok "Cleanup Completed"
# Last Action
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:6868${CL}"

View File

@ -1,38 +0,0 @@
{
"name": "Huly",
"slug": "huly",
"categories": [4],
"date_created": "2025-06-14",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 80,
"documentation": "https://github.com/hcengineering/huly-selfhost",
"website": "https://github.com/hcengineering/huly-selfhost",
"logo": "https://raw.githubusercontent.com/hcengineering/huly-selfhost/master/logo.png",
"description": "Self-hosted collaboration platform with chat, video calls, and AI-powered features. Native installation without Docker.",
"install_methods": [
{
"type": "default",
"script": "ct/huly.sh",
"resources": {
"cpu": 2,
"ram": 4096,
"hdd": 20,
"os": "debian",
"version": "12"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": [
"Huly is resource-intensive. At least 2 vCPUs and 4GB RAM are recommended.",
"This installation runs Huly natively without Docker containers.",
"Includes MongoDB, MinIO object storage, and all Huly microservices.",
"Services: Frontend (3000), Account (3001), Transactor (3002), Collaborator (3078), Rekoni (4004)",
"MinIO console available at port 9001 (minioadmin/minioadmin)"
]
}