first push
All checks were successful
Build, Push, Publish / Build & Release (push) Successful in 26s

This commit is contained in:
2026-01-21 20:28:24 -03:00
commit 23ebc23525
5 changed files with 1494 additions and 0 deletions

View File

@@ -0,0 +1,384 @@
name: Build, Push, Publish
on:
  push:
    branches:
      - main
  workflow_dispatch:
  workflow_run:
    workflows: ["Sync Repo"]
    types:
      - completed
jobs:
  release:
    name: Build & Release
    runs-on: ubuntu-latest
    container:
      image: catthehacker/ubuntu:act-latest
    permissions:
      contents: write
      packages: write
    steps:
      - name: 📥 Checkout code with full history and tags
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      # Reads the optional INI-style `Dockerargs` file: [repo] url/branch points
      # at an upstream repo to track; [args] lists docker build-args.
      - name: Check for Dockerargs and Upstream Updates
        id: check_upstream
        run: |
          if [ -f Dockerargs ]; then
            echo "Dockerargs found. Checking upstream..."
            # Parse repo info using awk to avoid git config restrictions on underscores in keys elsewhere in the file
            REPO_URL=$(awk -F '=' '/\[repo\]/{flag=1; next} /\[/{flag=0} flag && /^url=/{print $2}' Dockerargs | tr -d ' \r\n')
            REPO_BRANCH=$(awk -F '=' '/\[repo\]/{flag=1; next} /\[/{flag=0} flag && /^branch=/{print $2}' Dockerargs | tr -d ' \r\n')
            if [ -z "$REPO_BRANCH" ]; then REPO_BRANCH="main"; fi
            # Fetch upstream SHA (short, 7 chars) and compare with the one recorded in manifest.json
            if [ -n "$REPO_URL" ]; then
              UPSTREAM_SHA=$(git ls-remote "$REPO_URL" "$REPO_BRANCH" | awk '{ print $1 }' | head -c 7)
              echo "Upstream SHA: $UPSTREAM_SHA"
              if [ -f manifest.json ]; then
                LOCAL_SHA=$(jq -r '.upstream_sha // empty' manifest.json)
              else
                LOCAL_SHA=""
              fi
              if [ "$LOCAL_SHA" != "$UPSTREAM_SHA" ]; then
                echo "Upstream changed ($LOCAL_SHA -> $UPSTREAM_SHA)."
                echo "upstream_needs_update=true" >> "$GITHUB_OUTPUT"
                echo "upstream_sha=$UPSTREAM_SHA" >> "$GITHUB_OUTPUT"
                echo "repo_url=$REPO_URL" >> "$GITHUB_OUTPUT"
                echo "repo_branch=$REPO_BRANCH" >> "$GITHUB_OUTPUT"
              else
                echo "Upstream up to date."
                echo "upstream_needs_update=false" >> "$GITHUB_OUTPUT"
              fi
              # Parse Build Args
              echo "Parsing [args] from Dockerargs..."
              ARGS_CONTENT=$(sed -n '/^\[args\]/,/^\[/p' Dockerargs | grep -v '^\[' | grep '=' || true)
              if [ -n "$ARGS_CONT ENT" ]; then
                echo "Found args:"
                echo "$ARGS_CONTENT"
                echo "build_args<<EOF" >> "$GITHUB_OUTPUT"
                echo "$ARGS_CONTENT" >> "$GITHUB_OUTPUT"
                echo "EOF" >> "$GITHUB_OUTPUT"
              else
                echo "No args found."
                echo "build_args=" >> "$GITHUB_OUTPUT"
              fi
            else
              echo "Repo URL not found in Dockerargs."
              echo "upstream_needs_update=false" >> "$GITHUB_OUTPUT"
              echo "build_args=" >> "$GITHUB_OUTPUT"
            fi
          else
            echo "No Dockerargs found."
            echo "upstream_needs_update=false" >> "$GITHUB_OUTPUT"
            echo "build_args=" >> "$GITHUB_OUTPUT"
          fi
      - name: Check if any tags exist
        id: check_tags_exist
        run: |
          git fetch --tags
          TAG_COUNT=$(git tag | wc -l)
          if [ "$TAG_COUNT" -eq 0 ]; then
            echo "has_tags=false" >> "$GITHUB_OUTPUT"
            echo "latest_tag=v0.0.0" >> "$GITHUB_OUTPUT"
          else
            echo "has_tags=true" >> "$GITHUB_OUTPUT"
            LATEST_TAG=$(git describe --tags --abbrev=0)
            echo "latest_tag=$LATEST_TAG" >> "$GITHUB_OUTPUT"
          fi
      # A "meaningful" commit is any change other than manifest.json, or a
      # detected upstream update, or a first release when no tags exist yet.
      - name: Check if meaningful commits exist since latest tag
        id: check_commits
        run: |
          UPSTREAM_UPDATE="${{ steps.check_upstream.outputs.upstream_needs_update }}"
          if [ "$UPSTREAM_UPDATE" = "true" ]; then
            echo "commit_count=1" >> "$GITHUB_OUTPUT"
            echo "changed_files=Upstream Update to ${{ steps.check_upstream.outputs.upstream_sha }}" >> "$GITHUB_OUTPUT"
          elif [ "${{ steps.check_tags_exist.outputs.has_tags }}" = "false" ]; then
            # No tags exist, so we should create first release
            echo "commit_count=1" >> "$GITHUB_OUTPUT"
            CHANGED_FILES=$(git ls-files | grep -v '^manifest.json$' || true)
            if [ -n "$CHANGED_FILES" ]; then
              echo "changed_files<<EOF" >> "$GITHUB_OUTPUT"
              printf '%s\n' "$CHANGED_FILES" >> "$GITHUB_OUTPUT"
              echo "EOF" >> "$GITHUB_OUTPUT"
            else
              echo "changed_files=Initial release" >> "$GITHUB_OUTPUT"
            fi
          else
            LATEST_TAG="${{ steps.check_tags_exist.outputs.latest_tag }}"
            CHANGED_FILES="$(git diff --name-only "${LATEST_TAG}..HEAD" | grep -v '^manifest.json$' || true)"
            if [ -n "$CHANGED_FILES" ]; then
              echo "commit_count=1" >> "$GITHUB_OUTPUT"
              echo "changed_files<<EOF" >> "$GITHUB_OUTPUT"
              printf '%s\n' "$CHANGED_FILES" >> "$GITHUB_OUTPUT"
              echo "EOF" >> "$GITHUB_OUTPUT"
            else
              echo "commit_count=0" >> "$GITHUB_OUTPUT"
            fi
          fi
      - name: Get latest release tag (from Gitea API)
        id: get_latest_release
        run: |
          # Using Gitea API; falls back to v1.0.0 when no release exists yet
          LATEST_RELEASE_TAG=$(curl -sL -H "Accept: application/json" \
            -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            "${{ gitea.api_url }}/repos/${{ gitea.repository }}/releases/latest" | jq -r .tag_name)
          if [ -z "$LATEST_RELEASE_TAG" ] || [ "$LATEST_RELEASE_TAG" = "null" ]; then
            LATEST_RELEASE_TAG="v1.0.0"
          fi
          echo "latest_release_tag=$LATEST_RELEASE_TAG" >> "$GITHUB_OUTPUT"
          echo "latest_release_version=${LATEST_RELEASE_TAG#v}" >> "$GITHUB_OUTPUT"
      # -------------------------------
      # Sync manifest.json to last release version if behind (only when no meaningful commits)
      # -------------------------------
      - name: 🛠 Ensure manifest.json matches latest release version
        if: steps.check_commits.outputs.commit_count == '0'
        run: |
          if [ -f manifest.json ]; then
            MANIFEST_VERSION=$(jq -r '.version // empty' manifest.json)
          else
            MANIFEST_VERSION=""
          fi
          LATEST_RELEASE_VERSION="${{ steps.get_latest_release.outputs.latest_release_version }}"
          PYTHON_CODE="from packaging import version; \
          print(version.parse('$LATEST_RELEASE_VERSION') > version.parse('$MANIFEST_VERSION') if '$MANIFEST_VERSION' else True)"
          # Python3 is available in catthehacker/ubuntu:act-latest
          NEED_UPDATE=$(python3 -c "$PYTHON_CODE")
          if [ "$NEED_UPDATE" = "True" ]; then
            echo "Updating manifest.json to version $LATEST_RELEASE_VERSION (sync with release)"
            jq --arg v "$LATEST_RELEASE_VERSION" '.version = $v' manifest.json > tmp.json && mv tmp.json manifest.json
            git config user.name "Gitea Actions"
            git config user.email "actions@git.icc.gg"
            git add manifest.json
            git commit -m "Sync manifest.json to release $LATEST_RELEASE_VERSION [🔄]" || echo "Nothing to commit"
            git push origin main || true
          else
            echo "Manifest.json is already up-to-date with the latest release."
          fi
      # -------------------------------
      # Continue normal workflow if commits exist
      # -------------------------------
      - name: 📃 Get list of changed files (Markdown bullet list)
        if: steps.check_commits.outputs.commit_count != '0'
        id: changed_files
        run: |
          BULLET_LIST="$(printf '%s\n' "${{ steps.check_commits.outputs.changed_files }}" | sed 's/^/- /')"
          echo "CHANGED<<EOF" >> "$GITHUB_OUTPUT"
          printf '%s\n' "$BULLET_LIST" >> "$GITHUB_OUTPUT"
          echo "EOF" >> "$GITHUB_OUTPUT"
          COUNT="$(printf '%s\n' "${{ steps.check_commits.outputs.changed_files }}" | wc -l)"
          echo "COUNT=$COUNT" >> "$GITHUB_OUTPUT"
      - name: Get manifest version
        if: steps.check_commits.outputs.commit_count != '0'
        id: get_manifest_version
        run: |
          if [ -f manifest.json ]; then
            MANIFEST_VERSION=$(jq -r '.version // empty' manifest.json)
            if [ -z "$MANIFEST_VERSION" ] || [ "$MANIFEST_VERSION" = "null" ]; then
              MANIFEST_VERSION="1.0.0"
            fi
          else
            MANIFEST_VERSION="1.0.0"
          fi
          echo "manifest_version=$MANIFEST_VERSION" >> "$GITHUB_OUTPUT"
      - name: Pick base version
        if: steps.check_commits.outputs.commit_count != '0'
        id: pick_base_version
        run: |
          LATEST_RELEASE="${{ steps.get_latest_release.outputs.latest_release_version }}"
          MANIFEST="${{ steps.get_manifest_version.outputs.manifest_version }}"
          BASE_VERSION=$(python3 -c "from packaging import version; \
          print(str(max(version.parse('$LATEST_RELEASE'), version.parse('$MANIFEST'))))")
          echo "base_version=$BASE_VERSION" >> "$GITHUB_OUTPUT"
      # Bump: >=5 changed files -> major, >=3 -> minor, otherwise patch.
      - name: 🔢 Determine version
        if: steps.check_commits.outputs.commit_count != '0'
        id: version
        run: |
          BASE_VERSION="${{ steps.pick_base_version.outputs.base_version }}"
          MAJOR=$(echo "$BASE_VERSION" | cut -d. -f1)
          MINOR=$(echo "$BASE_VERSION" | cut -d. -f2)
          PATCH=$(echo "$BASE_VERSION" | cut -d. -f3)
          COUNT="${{ steps.changed_files.outputs.COUNT }}"
          if [ "$COUNT" -ge 5 ]; then
            MAJOR=$((MAJOR + 1))
            MINOR=0
            PATCH=0
          elif [ "$COUNT" -ge 3 ]; then
            MINOR=$((MINOR + 1))
            PATCH=0
          else
            PATCH=$((PATCH + 1))
          fi
          NEW_VERSION="${MAJOR}.${MINOR}.${PATCH}"
          REPO_NAME="$(basename "$GITHUB_REPOSITORY")"
          ZIP_NAME="${REPO_NAME}-${NEW_VERSION}.zip"
          echo "VERSION=$NEW_VERSION" >> "$GITHUB_OUTPUT"
          echo "ZIP_NAME=$ZIP_NAME" >> "$GITHUB_OUTPUT"
          echo "REPO_NAME=$REPO_NAME" >> "$GITHUB_OUTPUT"
      - name: 🛠 Update or create manifest.json
        if: steps.check_commits.outputs.commit_count != '0'
        run: |
          VERSION="${{ steps.version.outputs.VERSION }}"
          AUTHOR="Ivan Carlos"
          VERSION_FILE="manifest.json"
          UPSTREAM_SHA="${{ steps.check_upstream.outputs.upstream_sha }}"
          if [ -f "$VERSION_FILE" ]; then
            jq --arg v "$VERSION" \
              --arg a "$AUTHOR" \
              --arg u "$UPSTREAM_SHA" \
              '.version = $v | .author = $a | if $u != "" and $u != null then .upstream_sha = $u else . end' \
              "$VERSION_FILE" > tmp.json && mv tmp.json "$VERSION_FILE"
          else
            echo "{ \"version\": \"$VERSION\", \"author\": \"$AUTHOR\", \"upstream_sha\": \"$UPSTREAM_SHA\" }" > "$VERSION_FILE"
          fi
      - name: 💾 Commit and push updated manifest.json
        if: steps.check_commits.outputs.commit_count != '0'
        run: |
          git config user.name "Gitea Actions"
          git config user.email "actions@git.icc.gg"
          git add manifest.json
          git commit -m "Update manifest version to ${{ steps.version.outputs.VERSION }} [▶️]" || echo "Nothing to commit"
          git push origin main
      - name: 🛠 Install zip
        if: steps.check_commits.outputs.commit_count != '0'
        run: |
          apt-get update && apt-get install -y zip
      - name: 📦 Create ZIP package (excluding certain files)
        if: steps.check_commits.outputs.commit_count != '0'
        run: |
          ZIP_NAME="${{ steps.version.outputs.ZIP_NAME }}"
          zip -r "$ZIP_NAME" . -x ".git/*" ".github/*" "docker/*" ".dockerignore" "CNAME" "Dockerfile" "README.md" "LICENSE" ".gitea/*"
      - name: 🚀 Create Gitea Release
        if: steps.check_commits.outputs.commit_count != '0'
        id: create_release
        env:
          CHANGELOG_LIST: ${{ steps.changed_files.outputs.CHANGED }}
        run: |
          TAG_NAME="v${{ steps.version.outputs.VERSION }}"
          RELEASE_NAME="${{ steps.version.outputs.REPO_NAME }} v${{ steps.version.outputs.VERSION }}"
          # Construct Markdown body safely using env var
          # We use printf to avoid interpreting backslashes in the file list
          BODY=$(printf "### Changelog\nFiles changed in this release:\n%s" "$CHANGELOG_LIST")
          # Create JSON payload using jq
          jq -n \
            --arg tag_name "$TAG_NAME" \
            --arg name "$RELEASE_NAME" \
            --arg body "$BODY" \
            '{tag_name: $tag_name, name: $name, body: $body, draft: false, prerelease: false}' > release_payload.json
          echo "DEBUG: Generated Payload:"
          cat release_payload.json
          # Create Release
          curl -s -X POST "${{ gitea.api_url }}/repos/${{ gitea.repository }}/releases" \
            -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            -H "Content-Type: application/json" \
            -d @release_payload.json > api_response.json
          echo "DEBUG: API Response:"
          cat api_response.json || true
          RELEASE_ID=$(jq -r .id api_response.json)
          echo "RELEASE_ID=$RELEASE_ID" >> "$GITHUB_OUTPUT"
          if [ "$RELEASE_ID" = "null" ] || [ -z "$RELEASE_ID" ]; then
            echo "Failed to create release. Response content:"
            cat api_response.json
            exit 1
          fi
      - name: 📤 Upload Release Asset
        if: steps.check_commits.outputs.commit_count != '0'
        run: |
          RELEASE_ID="${{ steps.create_release.outputs.RELEASE_ID }}"
          ZIP_NAME="${{ steps.version.outputs.ZIP_NAME }}"
          FILE_PATH="./$ZIP_NAME"
          curl --fail -s -X POST "${{ gitea.api_url }}/repos/${{ gitea.repository }}/releases/$RELEASE_ID/assets?name=$ZIP_NAME" \
            -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            -H "Content-Type: application/zip" \
            --data-binary @"$FILE_PATH"
      # ----- Docker steps -----
      - name: Clone Upstream Code (if needed)
        if: steps.check_commits.outputs.commit_count != '0' && (steps.check_upstream.outputs.upstream_needs_update == 'true' || steps.check_upstream.outputs.repo_url != '')
        run: |
          rm -rf upstream_src
          # Quote the interpolated values so branch/url containing shell
          # metacharacters cannot be word-split or interpreted.
          git clone --depth 1 --branch "${{ steps.check_upstream.outputs.repo_branch }}" "${{ steps.check_upstream.outputs.repo_url }}" upstream_src
      - name: 🔍 Check if Dockerfile exists
        if: steps.check_commits.outputs.commit_count != '0' || steps.check_upstream.outputs.upstream_needs_update == 'true'
        id: dockerfile_check
        run: |
          if [ -n "${{ steps.check_upstream.outputs.repo_url }}" ]; then
            if [ -f upstream_src/Dockerfile ]; then
              echo "exists=true" >> "$GITHUB_OUTPUT"
            else
              # Fallback or error? User said "ignore", but we need a dockerfile to build.
              # Assuming if upstream_src is present, we trust it, or fail at build time.
              # Let's say exists=true and let build fail if missing, per user hint.
              echo "exists=true" >> "$GITHUB_OUTPUT"
            fi
          elif [ -f Dockerfile ]; then
            echo "exists=true" >> "$GITHUB_OUTPUT"
          else
            echo "exists=false" >> "$GITHUB_OUTPUT"
          fi
      - name: 🔐 Login to Gitea Container Registry
        if: steps.check_commits.outputs.commit_count != '0' && steps.dockerfile_check.outputs.exists == 'true'
        uses: docker/login-action@v3
        with:
          registry: git.icc.gg
          username: ${{ gitea.actor }}
          password: ${{ secrets.CR_PAT }}
      - name: 🛠 Set up QEMU
        if: steps.check_commits.outputs.commit_count != '0' && steps.dockerfile_check.outputs.exists == 'true'
        uses: docker/setup-qemu-action@v3
      - name: 🛠 Set up Docker Buildx
        if: steps.check_commits.outputs.commit_count != '0' && steps.dockerfile_check.outputs.exists == 'true'
        uses: docker/setup-buildx-action@v3
      - name: 🐳 Build and Push Docker image
        if: steps.check_commits.outputs.commit_count != '0' && steps.dockerfile_check.outputs.exists == 'true'
        uses: docker/build-push-action@v5
        id: docker_build
        with:
          # Build from the upstream clone when Dockerargs points at one,
          # otherwise from this repository's root.
          context: ${{ steps.check_upstream.outputs.repo_url != '' && './upstream_src' || '.' }}
          platforms: linux/amd64,linux/arm64
          file: ${{ steps.check_upstream.outputs.repo_url != '' && './upstream_src/Dockerfile' || './Dockerfile' }}
          push: true
          build-args: |
            ${{ steps.check_upstream.outputs.build_args }}
          tags: |
            git.icc.gg/${{ gitea.repository }}:latest
            git.icc.gg/${{ gitea.repository }}:${{ steps.version.outputs.VERSION }}

View File

@@ -0,0 +1,59 @@
name: Update README
permissions:
  contents: write
on:
  workflow_dispatch:
  schedule:
    - cron: "0 4 * * *" # Every day at 4 AM UTC
jobs:
  update-readme:
    runs-on: ubuntu-latest
    container:
      image: catthehacker/ubuntu:act-latest
    env:
      # Template repository that owns the canonical README footer.
      SOURCE_REPO: ivancarlos/.gitea
      SOURCE_BRANCH: main
    steps:
      - name: Checkout current repository
        uses: actions/checkout@v4
      - name: Checkout source README template
        uses: actions/checkout@v4
        with:
          repository: ${{ env.SOURCE_REPO }}
          ref: ${{ env.SOURCE_BRANCH }}
          token: ${{ secrets.CR_PAT }}
          path: source_readme
      - name: Update README.md (footer only)
        run: |
          set -e
          # --- Extract footer block from source (everything from <!-- footer --> onward) ---
          FOOTER=$(awk '/<!-- footer -->/{flag=1}flag' source_readme/README.md)
          # --- Replace everything after <!-- footer --> with FOOTER ---
          # On the marker line the first rule prints the (marker-inclusive)
          # footer and exits, so nothing after the marker survives.
          awk -v footer="$FOOTER" '
            /<!-- footer -->/ {
              print footer
              found=1
              exit
            }
            { print }
          ' README.md > README.tmp && mv README.tmp README.md
      - name: Remove source_readme from git index
        run: rm -rf source_readme
      - name: Commit and push changes
        run: |
          git config user.name "Gitea Actions"
          git config user.email "actions@git.icc.gg"
          git add README.md
          git commit -m "Sync README from template [▶️]" || echo "Nothing to commit"
          git push origin "${{ github.ref_name }}"

87
README.md Executable file
View File

@@ -0,0 +1,87 @@
YOURLS Plugin: Upload and Shorten
=================================
Plugin for [YOURLS](http://yourls.org) (version 1.7 or newer)
Description
-----------
This plugin lets you upload a file to your webserver and automagically creates a YOURLS short-URL for it. Then you can share that file by its short link as well as its full URL.
Features
--------
* Different ways to change the filename during the upload
* Make a note about it for yourself in the YOURLS database (by default the title field is filled with the original filename and the alteration method)
* Keep track of views/downloads via YOURLS's history function
* Localization support (currently: French, English, Spanish, German and Simplified Chinese. More translations provided by volunteers are greatly appreciated.)
Requirements
------------
What you need:
* A webserver with PHP support
* A functional installation of [YOURLS](http://yourls.org)
* This Plugin ;-)
* A bit of understanding what it does and what you can do with it ;-)
Installation
------------
* Navigate to the folder `./user/plugins/` inside your YOURLS-install directory
* Use any of these two ways to install:
- **Either** clone this repo using `git`
- **or** create a new folder named `Upload-and-Shorten`, then download all files from here *into that directory*.
* Prepare your configuration:
* If necessary create a directory where your files can be accessed from the webserver (i.e '/full/path/to/httpd/directory/')
* Depending on your webserver's setup you may have to modify the permissions of that directory:
- Make sure your webserver has read+write permissions for it. Explaining that is beyond the scope of this readme, please refer to the manual of your server, operating system or hosting provider. On a Linux box something like
`chown :www-data /full/path/to/httpd/directory && chmod g+rwx /full/path/to/httpd/directory`
should do the trick, but please don't rely on it.
**A correct server configuration is important for its functionality, but essential for its safety!**
* Now open `./user/config.php` in your YOURLS-directory with any text editor and ...
- add these definition lines and save the file:
`# Paths for plugin: "Upload-and-Shorten":`
`# The web URL path where YOURLS short-links will redirect to:`
`define( 'SHARE_URL', 'http://my.domain.tld/directory/' );`
`# The physical path where the plugin drops your files into:`
`define( 'SHARE_DIR', '/full/path/to/httpd/directory/' );`
(Adjust paths to your needs...)
* Go to the Plugins Administration Page (eg. `http://sho.rt/admin/plugins.php`) and activate the plugin.
* Have fun!
* Consider helping with translations.
Bugs & Issues
-------------
No critical misbehaviour known, most issues are caused by configuration errors.
Beware of scripts and plugins which validate URLs or intercept the data flow. ~~Namely the plugin "Check URL" can interfere with this plugin,~~ (This issue has been fixed for basic setups, see [issue #11](https://github.com/fredl99/YOURLS-Upload-and-Shorten/issues/11).) However, there might still occur interferences with plugins which check target URLs or manipulate the database by themselves. So, when you notice a strange behaviour always think about this and if you report an issue please include a list of installed and activated plugins.
Localization (l10n)
--------------------
This plugin supports **localization** (translations into your language).
**For this to work you need at least YOURLS v1.7 from March 1, 2015**. It will basically work fine with earlier versions, except that translations won't work because of a minor bug in the YOURLS-code. Just upgrade to the latest YOURLS version and it will do.
The default language is English. Translation files for French, German, Spanish and Simplified Chinese are included in the folder `l10n/`. To use this feature you just have to define your locale in `user/config.php` like this:
`define( 'YOURLS_LANG', 'de_DE' );`
(can be found within the standard YOURLS options there)
Looking for translators
-----------------------
If you're willing to provide translations, please [read this](http://blog.yourls.org/2013/02/workshop-how-to-create-your-own-translation-file-for-yourls/). If necessary you can contact me for further instructions. Any help is appreciated, at most by your fellow countrymen!
Donations
---------
There are many ways to integrate this plugin into your daily routines. The more you use it the more you will discover. The more you discover the more you will like it.
If you do, remember someone spends his time for improving it. If you want say thanks for that, just [buy him a coffee](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=H5B9UKVYP88X4). That will certainly motivate him to make further enhancements. Just for You! ...
![](https://s.fredls.net/wjotnlsc1igvzq) and him :)
License
-------
**Free for personal use only.**
If you want to make money with it you have to contact me first.
Thanks for your attention.

BIN
aws.phar Executable file

Binary file not shown.

964
plugin.php Executable file
View File

@@ -0,0 +1,964 @@
<?php
/*
Plugin Name: ICC Upload & Shorten
Plugin URI: https://github.com/ivancarlosti/YOURLS-Upload-and-Shorten
Description: Upload a file locally or to AWS S3 and create a short-YOURL for it in one step.
Version: 1.0
Author: Ivan Carlos
Author URI: https://github.com/ivancarlosti
*/
// No direct call: bail out unless loaded from within YOURLS itself.
if (!defined('YOURLS_ABSPATH'))
die();
// Register our plugin admin page
yourls_add_action('plugins_loaded', 'icc_upload_and_shorten_add_page');
// Intercept chunk-upload AJAX posts early, before the admin page renders.
yourls_add_action('admin_init', 'icc_upload_and_shorten_handle_ajax');
yourls_add_action('load-plugins_page_icc_upload_and_shorten', 'icc_upload_and_shorten_cleanup_temp'); // Run cleanup on plugin page load
function icc_upload_and_shorten_add_page()
{
    // Add this plugin's page to the YOURLS admin plugin menu; the last
    // argument is the callback that renders the page.
    yourls_register_plugin_page(
        'icc_upload_and_shorten',
        'Upload & Shorten',
        'icc_upload_and_shorten_do_page'
    );
}
// Handle AJAX requests for chunked uploads
// Dispatcher for the chunked-upload AJAX protocol. Runs on admin_init; only
// reacts to the two plugin-specific POST actions and ignores everything else.
function icc_upload_and_shorten_handle_ajax()
{
    $action = isset($_POST['action']) ? $_POST['action'] : '';
    if ($action != 'icc_upload_chunk' && $action != 'icc_upload_finish') {
        return;
    }
    // Ensure user is authenticated before touching any upload data.
    if (!yourls_is_valid_user()) {
        echo json_encode(['status' => 'error', 'message' => 'Authentication failed']);
        die();
    }
    if ($action == 'icc_upload_chunk') {
        icc_upload_and_shorten_handle_chunk();
    }
    if ($action == 'icc_upload_finish') {
        icc_upload_and_shorten_handle_finish();
    }
}
// Receive one chunk of a chunked upload and store it as
// <share_dir>/icc_temp_<upload_id>/part_<n>. Emits a JSON status and dies.
function icc_upload_and_shorten_handle_chunk()
{
    $nonce = $_POST['nonce'] ?? '';
    if (!yourls_verify_nonce('icc_upload_chunk', $nonce)) {
        echo json_encode(['status' => 'error', 'message' => 'Security check failed']);
        die();
    }
    if (!isset($_FILES['file_chunk']) || $_FILES['file_chunk']['error'] != UPLOAD_ERR_OK) {
        echo json_encode(['status' => 'error', 'message' => 'Upload error']);
        die();
    }
    // Whitelist-sanitize the client-supplied id so it is safe to embed in a
    // filesystem path; an empty result would collapse every upload into the
    // same ".../icc_temp_" directory, so reject it.
    $upload_id = preg_replace('/[^a-zA-Z0-9_]/', '', $_POST['upload_id'] ?? '');
    if ($upload_id === '') {
        echo json_encode(['status' => 'error', 'message' => 'Invalid upload id']);
        die();
    }
    $temp_dir = yourls_get_option('icc_upload_share_dir');
    if (!$temp_dir)
        $temp_dir = sys_get_temp_dir();
    // Create a temp directory for this upload
    $target_dir = rtrim($temp_dir, '/') . '/icc_temp_' . $upload_id;
    if (!is_dir($target_dir))
        mkdir($target_dir, 0755, true);
    // intval() turns missing/garbage input into 0; refuse negative indices
    // so the part_ filename cannot contain "-".
    $chunk_index = intval($_POST['chunk_index'] ?? 0);
    if ($chunk_index < 0) {
        echo json_encode(['status' => 'error', 'message' => 'Invalid chunk index']);
        die();
    }
    $target_file = $target_dir . '/part_' . $chunk_index;
    if (move_uploaded_file($_FILES['file_chunk']['tmp_name'], $target_file)) {
        echo json_encode(['status' => 'success']);
    } else {
        echo json_encode(['status' => 'error', 'message' => 'Failed to move chunk']);
    }
    die();
}
// Final step of a chunked upload: reassemble the part_* files written by
// icc_upload_and_shorten_handle_chunk() into one file and hand it to the
// normal upload pipeline. Emits a JSON status (with the result HTML in
// "message" on success) and dies.
function icc_upload_and_shorten_handle_finish()
{
    $nonce = $_POST['nonce'] ?? '';
    if (!yourls_verify_nonce('icc_upload_chunk', $nonce)) {
        echo json_encode(['status' => 'error', 'message' => 'Security check failed']);
        die();
    }
    // Same whitelist as the chunk handler.
    $upload_id = preg_replace('/[^a-zA-Z0-9_]/', '', $_POST['upload_id'] ?? '');
    // SECURITY: basename() strips any path components from the client-supplied
    // name ("../../evil" -> "evil"), preventing directory traversal when the
    // name is joined onto $target_dir below.
    $file_name = basename((string) ($_POST['file_name'] ?? ''));
    if ($upload_id === '' || $file_name === '' || $file_name === '.' || $file_name === '..') {
        echo json_encode(['status' => 'error', 'message' => 'Invalid upload parameters']);
        die();
    }
    $temp_dir = yourls_get_option('icc_upload_share_dir');
    if (!$temp_dir)
        $temp_dir = sys_get_temp_dir();
    $target_dir = rtrim($temp_dir, '/') . '/icc_temp_' . $upload_id;
    if (!is_dir($target_dir)) {
        echo json_encode(['status' => 'error', 'message' => 'Unknown upload id']);
        die();
    }
    $final_file_path = $target_dir . '/' . $file_name;
    // Assemble chunks in natural order so part_10 sorts after part_2.
    if ($fp = fopen($final_file_path, 'wb')) {
        $chunks = glob($target_dir . '/part_*');
        natsort($chunks);
        foreach ($chunks as $chunk) {
            $chunk_content = file_get_contents($chunk);
            fwrite($fp, $chunk_content);
            unlink($chunk);
        }
        fclose($fp);
        // NOTE(review): best-effort cleanup — the assembled file still lives
        // inside $target_dir at this point, so rmdir() likely fails here and
        // the directory is left for the cleanup_temp hook; confirm whether
        // process_upload() moves the file out.
        rmdir($target_dir); // Remove temp dir
        // now process the file
        // Pass essential POST data for filename conversion if needed
        $result = icc_upload_and_shorten_process_upload($final_file_path, $file_name);
        // The result is an HTML string; return it to the browser in "message".
        echo json_encode(['status' => 'success', 'message' => $result]);
    } else {
        echo json_encode(['status' => 'error', 'message' => 'Failed to assemble file']);
    }
    die();
}
// Display admin page
function icc_upload_and_shorten_do_page()
{
// Check if a form was submitted
if (isset($_POST['action']) && $_POST['action'] == 'icc_upload_and_shorten_save') {
icc_upload_and_shorten_update_settings();
}
// Handle Deletion
if (isset($_POST['action']) && $_POST['action'] == 'delete_local_file' && isset($_POST['file_name'])) {
$nonce = $_POST['nonce'] ?? '';
if (yourls_verify_nonce('icc_delete_local_file', $nonce)) {
$share_dir = yourls_get_option('icc_upload_share_dir');
$file_name = $_POST['file_name'];
// Validating filename to prevent directory traversal
if (basename($file_name) == $file_name) {
$file_path = rtrim($share_dir, '/') . '/' . $file_name;
if (file_exists($file_path)) {
if (unlink($file_path)) {
echo "<div class='updated'>File deleted successfully: " . htmlspecialchars($file_name) . "</div>";
} else {
echo "<div class='error'>Failed to delete file. Check permissions.</div>";
}
} else {
echo "<div class='error'>File not found.</div>";
}
} else {
echo "<div class='error'>Invalid filename.</div>";
}
} else {
echo "<div class='error'>Security check failed.</div>";
}
}
if (isset($_POST['action']) && $_POST['action'] == 'delete_file' && isset($_POST['file_key'])) {
$nonce = $_POST['nonce'] ?? '';
if (yourls_verify_nonce('icc_delete_file', $nonce)) {
try {
$s3_key = yourls_get_option('icc_upload_s3_key');
$s3_secret = yourls_get_option('icc_upload_s3_secret');
$s3_region = yourls_get_option('icc_upload_s3_region');
$s3_bucket = yourls_get_option('icc_upload_s3_bucket');
$s3 = icc_get_aws_client($s3_key, $s3_secret, $s3_region);
if ($s3) {
$s3->deleteObject([
'Bucket' => $s3_bucket,
'Key' => $_POST['file_key']
]);
echo "<div class='updated'>File deleted successfully: " . htmlspecialchars($_POST['file_key']) . "</div>";
} else {
echo "<div class='error'>Failed to initialize S3 client for deletion.</div>";
}
} catch (Aws\S3\Exception\S3Exception $e) {
echo "<div class='error'>Failed to delete file: " . $e->getMessage() . "</div>";
}
} else {
echo "<div class='error'>Security check failed (Invalid Nonce).</div>";
}
}
$message = '';
if (isset($_POST['submit']) && $_POST['submit'] == 'Upload')
$message = icc_upload_and_shorten_process_upload();
$storage_type = yourls_get_option('icc_upload_storage_type', 'local');
$share_url = yourls_get_option('icc_upload_share_url');
$share_dir = yourls_get_option('icc_upload_share_dir');
$suffix_length = yourls_get_option('icc_upload_suffix_length', 4);
// S3 Config
$s3_key = yourls_get_option('icc_upload_s3_key');
$s3_secret = yourls_get_option('icc_upload_s3_secret');
$s3_region = yourls_get_option('icc_upload_s3_region');
$s3_bucket = yourls_get_option('icc_upload_s3_bucket');
$s3_disable_acl = yourls_get_option('icc_upload_s3_disable_acl', false);
// input form
echo '
<h2>Upload & Shorten</h2>
<h3>Send a file to ' . ($storage_type == 's3' ? 'AWS S3' : 'your webserver') . ' and create a short-URL for it.</h3>';
// Limits Diagnostics
$max_upload = ini_get('upload_max_filesize');
$max_post = ini_get('post_max_size');
echo "<p><small>Server Limits: Upload Max Filesize: <strong>$max_upload</strong>, Post Max Size: <strong>$max_post</strong>. <br>The <strong>Smart Uploader</strong> bypasses these limits by splitting files into chunks!</small></p>";
if (!empty($message)) {
echo "<p><strong>$message</strong></p>";
}
if (
($storage_type == 'local' && (empty($share_url) || empty($share_dir))) ||
($storage_type == 's3' && (empty($s3_key) || empty($s3_secret) || empty($s3_region) || empty($s3_bucket)))
) {
echo '<p style="color:red"><strong>Please configure the plugin below before using this plugin.</strong></p>';
}
$chunk_nonce = yourls_create_nonce('icc_upload_chunk');
echo '
<form id="icc_upload_form" method="post" enctype="multipart/form-data">
<input type="hidden" name="action" value="upload_file" />
<input type="hidden" id="chunk_nonce" value="' . $chunk_nonce . '" />
<fieldset> <legend>Select a file </legend>
<p><input type="file" id="file_upload" name="file_upload" /></p>
<div id="progress_container" style="display:none; width: 100%; background-color: #f3f3f3; border: 1px solid #ccc; margin-top: 10px;">
<div id="progress_bar" style="width: 0%; height: 20px; background-color: #4caf50; text-align: center; color: white;">0%</div>
</div>
<p id="upload_status"></p>
</fieldset>';
// YOURLS options
echo '
<fieldset> <legend>YOURLS database options</legend>
<p><label for="custom_shortname">Custom shortname: </label>
<input type="text" id="custom_shortname" name="custom_shortname" />
<label for="custom_title">Custom title: </label>
<input type="text" id="custom_title" name="custom_title" /></p>
</fieldset>';
// filename handling
echo '
<fieldset> <legend>Filename conversions (optional)</legend>
<p><input type="radio" id="safe_filename" name="convert_filename" value="browser-safe" checked="checked" />
<label for="safe_filename">Browser-safe filename </label>
<small>(Recommended if the file should be accessed by web-browsers.)<br/ >
Ex.: "my not safe&clean filename #1.txt" -> https://example.com/my_not_safe_clean_filename_1.txt </small></p>
<p><input type="radio" id="safe_suffix" name="convert_filename" value="safe_suffix" />
<label for="safe_suffix">Browser-safe filename + random suffix </label>
<small>(Adds a random alphanumeric suffix to the filename.)<br/ >
Ex.: "file.txt" -> https://example.com/file_a1b2.txt </small></p>
<p><input type="radio" id="random_filename" name="convert_filename" value="randomized" />
<label for="random_filename">Randomize filename </label>
<small>(Browser-safe filenames with a slight protection against systematic crawling your web-directory.)<br/ >
Ex.: "mypicture.jpg" -> https://example.com/9a3e97434689.jpg </small></p>
</fieldset>';
// do it!
echo '
<p><input type="submit" id="submit_btn" name="submit" value="Upload" /></p>
</form>';
// JS for Chunked Upload
echo '
<script>
document.addEventListener("DOMContentLoaded", function() {
var form = document.getElementById("icc_upload_form");
var fileInput = document.getElementById("file_upload");
var progressBar = document.getElementById("progress_bar");
var progressContainer = document.getElementById("progress_container");
var status = document.getElementById("upload_status");
var submitBtn = document.getElementById("submit_btn");
form.onsubmit = function(event) {
if (fileInput.files.length === 0) return;
// Only capture if file is selected
event.preventDefault();
var file = fileInput.files[0];
var chunkSize = 2 * 1024 * 1024; // 2MB
var totalChunks = Math.ceil(file.size / chunkSize);
var chunkIndex = 0;
var uploadId = Date.now() + "_" + Math.random().toString(36).substr(2, 9);
var nonce = document.getElementById("chunk_nonce").value;
progressContainer.style.display = "block";
submitBtn.disabled = true;
status.innerHTML = "Uploading chunk 1 of " + totalChunks + "...";
function uploadNextChunk() {
var start = chunkIndex * chunkSize;
var end = Math.min(start + chunkSize, file.size);
var chunk = file.slice(start, end);
var formData = new FormData();
formData.append("action", "icc_upload_chunk");
formData.append("file_chunk", chunk);
formData.append("chunk_index", chunkIndex);
formData.append("upload_id", uploadId);
formData.append("nonce", nonce);
var xhr = new XMLHttpRequest();
xhr.open("POST", window.location.href, true);
xhr.onload = function() {
if (xhr.status === 200) {
try {
var resp = JSON.parse(xhr.responseText);
if (resp.status === "success") {
chunkIndex++;
var percent = Math.round((chunkIndex / totalChunks) * 100);
progressBar.style.width = percent + "%";
progressBar.innerText = percent + "%";
if (chunkIndex < totalChunks) {
status.innerHTML = "Uploading chunk " + (chunkIndex + 1) + " of " + totalChunks + "...";
uploadNextChunk();
} else {
status.innerHTML = "Refining upload...";
finishUpload();
}
} else {
status.innerHTML = "Error: " + resp.message;
submitBtn.disabled = false;
}
} catch(e) {
status.innerHTML = "Error parsing server response.";
submitBtn.disabled = false;
}
} else {
status.innerHTML = "Server error " + xhr.status;
submitBtn.disabled = false;
}
};
xhr.send(formData);
}
function finishUpload() {
var formData = new FormData();
// Append all form fields except the file input
var elements = form.elements;
for (var i = 0; i < elements.length; i++) {
var el = elements[i];
if (el.name && el.type !== \'file\' && el.name !== \'submit\') { // Skip file and submit button
if (el.type === \'radio\' || el.type === \'checkbox\') {
if (el.checked) formData.append(el.name, el.value);
} else {
formData.append(el.name, el.value);
}
}
}
formData.append("action", "icc_upload_finish");
formData.append("upload_id", uploadId);
formData.append("file_name", file.name);
formData.append("nonce", nonce);
var xhr = new XMLHttpRequest();
xhr.open("POST", window.location.href, true);
xhr.onload = function() {
submitBtn.disabled = false;
if (xhr.status === 200) {
try {
var resp = JSON.parse(xhr.responseText);
if (resp.status === "success") {
status.innerHTML = "Upload Complete!";
// Instead of redirecting, we replace body or show message
// Ideally, we reload or show the result HTML
var resultDiv = document.createElement("div");
resultDiv.innerHTML = resp.message;
form.parentNode.insertBefore(resultDiv, form);
form.reset();
progressContainer.style.display = "none";
} else {
status.innerHTML = "Error finishing upload: " + resp.message;
}
} catch(e) {
status.innerHTML = "Error finishing upload (Invalid JSON).";
console.log(xhr.responseText);
}
}
};
xhr.send(formData);
}
uploadNextChunk();
};
});
</script>
';
// File Manager
if ($storage_type == 's3' && !empty($s3_key) && !empty($s3_secret) && !empty($s3_bucket)) {
icc_upload_and_shorten_file_manager($s3_key, $s3_secret, $s3_region, $s3_bucket);
} elseif ($storage_type == 'local' && !empty($share_dir)) {
icc_upload_and_shorten_local_file_manager($share_dir, $share_url);
}
// Configuration Section
$nonce = yourls_create_nonce('icc_upload_and_shorten_settings');
echo '
<hr />
<h3>Configuration</h3>
<form method="post">
<input type="hidden" name="action" value="icc_upload_and_shorten_save" />
<input type="hidden" name="nonce" value="' . $nonce . '" />
<p>
<label for="icc_upload_storage_type"><strong>Storage Type:</strong></label><br />
<select name="icc_upload_storage_type" id="icc_upload_storage_type">
<option value="local" ' . ($storage_type == 'local' ? 'selected' : '') . '>Local Server</option>
<option value="s3" ' . ($storage_type == 's3' ? 'selected' : '') . '>AWS S3</option>
</select>
</p>
<h4>Local Server Settings</h4>
<p>
<label for="icc_upload_share_url">Share URL (The web URL path where YOURLS short-links will redirect to):</label><br />
<small>Example: <code>https://example.com/file/</code></small><br />
<input type="text" id="icc_upload_share_url" name="icc_upload_share_url" value="' . $share_url . '" size="50" />
</p>
<p>
<label for="icc_upload_share_dir">Share Directory (The physical path where uploads are stored):</label><br />
<small>Example: <code>/home/username/htdocs/example.com/file/</code> (Directory must exist)</small><br />
<input type="text" id="icc_upload_share_dir" name="icc_upload_share_dir" value="' . $share_dir . '" size="50" />
</p>
<h4>AWS S3 Settings</h4>
<p>
<label for="icc_upload_s3_key">AWS Access Key:</label><br />
<input type="text" id="icc_upload_s3_key" name="icc_upload_s3_key" value="' . $s3_key . '" size="50" />
</p>
<p>
<label for="icc_upload_s3_secret">AWS Secret Key:</label><br />
<input type="password" id="icc_upload_s3_secret" name="icc_upload_s3_secret" value="' . $s3_secret . '" size="50" />
</p>
<p>
<label for="icc_upload_s3_region">AWS Region:</label><br />
<input type="text" id="icc_upload_s3_region" name="icc_upload_s3_region" value="' . $s3_region . '" size="20" placeholder="us-east-1" />
</p>
<p>
<label for="icc_upload_s3_bucket">S3 Bucket Name:</label><br />
<input type="text" id="icc_upload_s3_bucket" name="icc_upload_s3_bucket" value="' . $s3_bucket . '" size="50" />
</p>
<p>
<input type="checkbox" id="icc_upload_s3_disable_acl" name="icc_upload_s3_disable_acl" ' . ($s3_disable_acl ? 'checked' : '') . ' />
<label for="icc_upload_s3_disable_acl"><strong>Disable ACLs</strong> (Check this if your bucket has "Block public access" or "Bucket Owner Enforced" enabled)</label>
</p>
<h4>General Settings</h4>
<p>
<label for="icc_upload_suffix_length">Random Suffix Length (For "Browser-safe + random suffix" option):</label><br />
<input type="number" id="icc_upload_suffix_length" name="icc_upload_suffix_length" value="' . $suffix_length . '" min="1" max="32" />
</p>
<p><input type="submit" value="Save Configuration" class="button-primary" /></p>
</form>
';
// footer
echo '
<hr style="margin-top: 40px" />
<p><strong><a href="https://ivancarlos.me/" target="_blank">Ivan Carlos</a></strong> &raquo;
<a href="https://buymeacoffee.com/ivancarlos" target="_blank">Buy Me a Coffee</a></p>';
}
// Save handler for the configuration form rendered on the plugin page.
// Verifies the nonce, then persists each submitted option.
// Fixes: an empty Share URL/Directory used to be stored as "/" (because
// rtrim('') . '/' === '/'); the storage type is now validated against the
// two supported backends; the suffix length is clamped to the form's 1-32
// range server-side.
function icc_upload_and_shorten_update_settings()
{
    yourls_verify_nonce('icc_upload_and_shorten_settings', $_REQUEST['nonce']);

    // Storage type: only 'local' and 's3' are supported backends; ignore
    // anything else a crafted POST might send.
    if (isset($_POST['icc_upload_storage_type'])) {
        $type = $_POST['icc_upload_storage_type'];
        if (in_array($type, array('local', 's3'), true)) {
            yourls_update_option('icc_upload_storage_type', $type);
        }
    }

    // Normalize to exactly one trailing slash, but keep an empty value
    // empty instead of turning it into "/".
    if (isset($_POST['icc_upload_share_url'])) {
        $url = trim($_POST['icc_upload_share_url']);
        yourls_update_option('icc_upload_share_url', $url === '' ? '' : rtrim($url, '/') . '/');
    }
    if (isset($_POST['icc_upload_share_dir'])) {
        $dir = trim($_POST['icc_upload_share_dir']);
        yourls_update_option('icc_upload_share_dir', $dir === '' ? '' : rtrim($dir, '/') . '/');
    }

    // S3 credentials and bucket settings (stored trimmed, as before).
    if (isset($_POST['icc_upload_s3_key']))
        yourls_update_option('icc_upload_s3_key', trim($_POST['icc_upload_s3_key']));
    if (isset($_POST['icc_upload_s3_secret']))
        yourls_update_option('icc_upload_s3_secret', trim($_POST['icc_upload_s3_secret']));
    if (isset($_POST['icc_upload_s3_region']))
        yourls_update_option('icc_upload_s3_region', trim($_POST['icc_upload_s3_region']));
    if (isset($_POST['icc_upload_s3_bucket']))
        yourls_update_option('icc_upload_s3_bucket', trim($_POST['icc_upload_s3_bucket']));

    // Checkbox: present in POST only when checked.
    yourls_update_option('icc_upload_s3_disable_acl', isset($_POST['icc_upload_s3_disable_acl']));

    // Clamp to the same 1..32 range the form's min/max attributes advertise.
    if (isset($_POST['icc_upload_suffix_length']))
        yourls_update_option('icc_upload_suffix_length', max(1, min(32, intval($_POST['icc_upload_suffix_length']))));

    echo "<div class='updated'>Settings saved</div>";
}
// Local File Manager Function
//
// Lists the files in the local share directory (newest first, 20 per page)
// with per-file delete forms and simple pagination controls.
//
// Fix: icc_rrmdir() and icc_upload_and_shorten_cleanup_temp() were
// previously declared *inside* this function's body (a brace mismatch):
// they only came into existence after this section rendered once, and a
// second render caused a "cannot redeclare" fatal error. Both are now
// proper top-level functions.
function icc_upload_and_shorten_local_file_manager($dir, $url)
{
    echo '<hr />';
    echo '<h3>Local File Manager</h3>';
    if (!is_dir($dir)) {
        echo '<p style="color:red">Directory not found: ' . htmlspecialchars($dir) . '</p>';
        return;
    }
    // Collect plain files only: skip '.', '..' and sub-directories (such
    // as the chunked-upload temp folders, if present).
    $raw_files = scandir($dir);
    $files = [];
    foreach ($raw_files as $f) {
        if ($f == '.' || $f == '..')
            continue;
        $full_path = rtrim($dir, '/') . '/' . $f;
        if (!is_dir($full_path)) {
            $files[] = $f;
        }
    }
    // Sort by modification time (newest first).
    usort($files, function ($a, $b) use ($dir) {
        return filemtime(rtrim($dir, '/') . '/' . $b) - filemtime(rtrim($dir, '/') . '/' . $a);
    });
    // Pagination bookkeeping: page number comes from ?local_page=N.
    $per_page = 20;
    $total_files = count($files);
    $total_pages = ceil($total_files / $per_page);
    $current_page = isset($_GET['local_page']) ? max(1, intval($_GET['local_page'])) : 1;
    $offset = ($current_page - 1) * $per_page;
    $page_files = array_slice($files, $offset, $per_page);
    if (empty($page_files)) {
        echo '<p>No files found.</p>';
        return;
    }
    $nonce = yourls_create_nonce('icc_delete_local_file');
    echo '<table class="widefat" style="margin-top:10px;">';
    echo '<thead><tr><th>File Name</th><th>Size</th><th>Last Modified</th><th>Action</th></tr></thead>';
    echo '<tbody>';
    foreach ($page_files as $file) {
        $filepath = rtrim($dir, '/') . '/' . $file;
        $size = file_exists($filepath) ? round(filesize($filepath) / 1024, 2) . ' KB' : 'N/A';
        $date = file_exists($filepath) ? date("Y-m-d H:i:s", filemtime($filepath)) : 'N/A';
        $file_url = rtrim($url, '/') . '/' . $file;
        echo '<tr>';
        echo '<td><a href="' . htmlspecialchars($file_url) . '" target="_blank">' . htmlspecialchars($file) . '</a></td>';
        echo '<td>' . $size . '</td>';
        echo '<td>' . $date . '</td>';
        echo '<td>';
        echo '<form method="post" style="display:inline;" onsubmit="return confirm(\'Are you sure you want to delete ' . htmlspecialchars($file, ENT_QUOTES) . '?\');">';
        echo '<input type="hidden" name="action" value="delete_local_file" />';
        echo '<input type="hidden" name="file_name" value="' . htmlspecialchars($file) . '" />';
        echo '<input type="hidden" name="nonce" value="' . $nonce . '" />';
        echo '<input type="submit" value="Delete" class="button-secondary" />';
        echo '</form>';
        echo '</td>';
        echo '</tr>';
    }
    echo '</tbody>';
    echo '</table>';
    // Pagination controls (only shown when there is more than one page).
    // ($nav_base replaces the old reuse of the $url parameter, which was
    // silently clobbered here after its last read above.)
    if ($total_pages > 1) {
        echo '<div class="tablenav"><div class="tablenav-pages">';
        echo '<span class="displaying-num">' . $total_files . ' items</span>';
        $nav_base = 'plugins.php?page=icc_upload_and_shorten';
        if ($current_page > 1) {
            echo '<a class="button" href="' . $nav_base . '&local_page=1">&laquo; First</a> ';
            echo '<a class="button" href="' . $nav_base . '&local_page=' . ($current_page - 1) . '">&lsaquo; Previous</a> ';
        }
        echo '<span class="current-page"> Page ' . $current_page . ' of ' . $total_pages . ' </span>';
        if ($current_page < $total_pages) {
            echo ' <a class="button" href="' . $nav_base . '&local_page=' . ($current_page + 1) . '">Next &rsaquo;</a> ';
            echo '<a class="button" href="' . $nav_base . '&local_page=' . $total_pages . '">Last &raquo;</a>';
        }
        echo '</div></div>';
    }
}

// Recursive directory removal: deletes contents first, then the directory.
// Symlinked directories are unlink()ed, not descended into.
function icc_rrmdir($dir)
{
    if (is_dir($dir)) {
        $objects = scandir($dir);
        foreach ($objects as $object) {
            if ($object != "." && $object != "..") {
                if (is_dir($dir . "/" . $object) && !is_link($dir . "/" . $object))
                    icc_rrmdir($dir . "/" . $object);
                else
                    unlink($dir . "/" . $object);
            }
        }
        rmdir($dir);
    }
}

// Cleanup Temp Folders: removes stale chunked-upload working directories
// ("icc_temp_*") older than one hour from the share directory, falling
// back to the system temp dir when no share dir is configured.
function icc_upload_and_shorten_cleanup_temp()
{
    $temp_dir = yourls_get_option('icc_upload_share_dir');
    if (!$temp_dir)
        $temp_dir = sys_get_temp_dir();
    if (!is_dir($temp_dir))
        return;
    // Scan for icc_temp_* directories
    $files = scandir($temp_dir);
    foreach ($files as $file) {
        if ($file == '.' || $file == '..')
            continue;
        $path = rtrim($temp_dir, '/') . '/' . $file;
        if (is_dir($path) && strpos($file, 'icc_temp_') === 0) {
            // Check age (1 hour = 3600 seconds)
            if (filemtime($path) < (time() - 3600)) {
                icc_rrmdir($path);
            }
        }
    }
}
// Check for AWS SDK and build a client from it.
// Returns a configured Aws\S3\S3Client, or false when the bundled
// aws.phar is missing or client construction throws. Callers treat a
// false return as "SDK unavailable" and render an error instead.
function icc_get_aws_client($key, $secret, $region)
{
    // The SDK is expected as a phar shipped alongside this plugin file.
    $phar = __DIR__ . '/aws.phar';
    if (!file_exists($phar)) {
        return false;
    }
    require_once $phar;
    try {
        // 'version' => 'latest' selects the newest API version bundled in
        // the phar; credentials are passed explicitly (no env discovery).
        return new Aws\S3\S3Client([
            'version' => 'latest',
            'region' => $region,
            'credentials' => [
                'key' => $key,
                'secret' => $secret,
            ],
        ]);
    } catch (Exception $e) {
        // Any construction failure degrades to false rather than fataling
        // the admin page.
        return false;
    }
}
// S3 File Manager Function
// Renders a paginated (20 per page) listing of the configured bucket with
// per-file delete forms. Pagination uses S3 continuation tokens carried in
// the query string; a comma-separated token history enables "Previous".
function icc_upload_and_shorten_file_manager($key, $secret, $region, $bucket)
{
echo '<hr />';
echo '<h3>S3 File Manager</h3>';
$s3 = icc_get_aws_client($key, $secret, $region);
if (!$s3) {
echo '<p style="color:red">Failed to initialize AWS Client.</p>';
return;
}
// Pagination
// Opaque S3 cursor from the previous "Next" click (null on the first page).
$continuation_token = isset($_GET['s3_next_token']) ? $_GET['s3_next_token'] : null;
try {
$params = [
'Bucket' => $bucket,
'MaxKeys' => 20
];
if ($continuation_token) {
$params['ContinuationToken'] = $continuation_token;
}
$objects = $s3->listObjectsV2($params);
if (!isset($objects['Contents']) || empty($objects['Contents'])) {
echo '<p>No files found in bucket.</p>';
if ($continuation_token) {
// A stale/invalid token can land on an empty page; offer a reset link.
echo '<p><a href="plugins.php?page=icc_upload_and_shorten" class="button">Start Over</a></p>';
}
} else {
$nonce = yourls_create_nonce('icc_delete_file');
echo '<table class="widefat" style="margin-top:10px;">';
echo '<thead><tr><th>File Name</th><th>Size</th><th>Last Modified</th><th>Action</th></tr></thead>';
echo '<tbody>';
foreach ($objects['Contents'] as $object) {
// Construct the file URL (Path-style S3 URL format)
$file_url = "https://s3.{$region}.amazonaws.com/{$bucket}/" . $object['Key'];
echo '<tr>';
echo '<td><a href="' . htmlspecialchars($file_url) . '" target="_blank">' . htmlspecialchars($object['Key']) . '</a></td>';
echo '<td>' . round($object['Size'] / 1024, 2) . ' KB</td>';
echo '<td>' . $object['LastModified'] . '</td>';
echo '<td>';
echo '<form method="post" style="display:inline;" onsubmit="return confirm(\'Are you sure you want to delete ' . htmlspecialchars($object['Key'], ENT_QUOTES) . '?\');">';
echo '<input type="hidden" name="action" value="delete_file" />';
echo '<input type="hidden" name="file_key" value="' . htmlspecialchars($object['Key']) . '" />';
echo '<input type="hidden" name="nonce" value="' . $nonce . '" />';
echo '<input type="submit" value="Delete" class="button-secondary" />';
echo '</form>';
echo '</td>';
echo '</tr>';
}
echo '</tbody>';
echo '</table>';
echo '<p><small>Showing files from S3 bucket.</small></p>';
// Pagination History (to allow 'Previous')
// Comma-separated list of the tokens that led to the current page.
$history_raw = isset($_GET['s3_history']) ? $_GET['s3_history'] : '';
$history = $history_raw ? explode(',', $history_raw) : [];
// Pagination Controls
echo '<div class="tablenav"><div class="tablenav-pages">';
$url_base = 'plugins.php?page=icc_upload_and_shorten';
// First Page
if ($continuation_token) {
echo '<a class="button" href="' . $url_base . '">&laquo; First</a> ';
}
// Previous Page
// The last history entry is the token for the page before this one;
// '__TOP__' is a sentinel meaning "the first page" (no token needed).
if (!empty($history)) {
$prev_token = array_pop($history);
$prev_history = implode(',', $history);
$prev_url = $url_base;
if ($prev_token && $prev_token !== '__TOP__') {
$prev_url .= '&s3_next_token=' . urlencode($prev_token);
}
if ($prev_history) {
$prev_url .= '&s3_history=' . urlencode($prev_history);
}
echo '<a class="button" href="' . $prev_url . '">&lsaquo; Previous</a> ';
}
// Next Page
if (isset($objects['NextContinuationToken'])) {
$next_token = $objects['NextContinuationToken'];
// Append current token to history
$current_history = $history_raw;
$token_to_add = $continuation_token ? $continuation_token : '__TOP__';
if ($current_history) {
$current_history .= ',' . $token_to_add;
} else {
$current_history = $token_to_add;
}
echo '<a class="next-page button" href="' . $url_base . '&s3_next_token=' . urlencode($next_token) . '&s3_history=' . urlencode($current_history) . '">Next &rsaquo;</a>';
}
echo '</div></div>';
}
} catch (Aws\S3\Exception\S3Exception $e) {
// NOTE(review): $e->getMessage() is echoed without htmlspecialchars();
// confirm SDK error messages cannot carry user-influenced markup.
echo '<p style="color:red">Error listing files: ' . $e->getMessage() . '</p>';
}
}
// Core upload processor: validates configuration, derives the destination
// filename, stores the file (local share directory or S3 bucket), then
// creates a YOURLS short link for the resulting URL. Returns an HTML
// snippet describing success (green) or failure (red).
//
// $local_file_path / $original_filename are provided by the chunked-upload
// finisher (file already assembled on disk); when null, the standard
// $_FILES['file_upload'] request path is used instead.
function icc_upload_and_shorten_process_upload($local_file_path = null, $original_filename = null)
{
// If not coming from chunked upload, standard validations
if (!$local_file_path) {
// did the user select any file?
if ($_FILES['file_upload']['error'] == UPLOAD_ERR_NO_FILE) {
return 'You need to select a file to upload.';
}
}
// Increase limits for processing large files
set_time_limit(0);
$storage_type = yourls_get_option('icc_upload_storage_type', 'local');
// Check Config
if ($storage_type == 'local') {
$my_url = yourls_get_option('icc_upload_share_url');
$my_uploaddir = yourls_get_option('icc_upload_share_dir');
if (empty($my_url) || empty($my_uploaddir))
return 'Plugin not configured for local storage.';
// Check if directory exists and is writable
if (!is_dir($my_uploaddir) || !is_writable($my_uploaddir)) {
return 'Upload directory does not exist or is not writable: ' . $my_uploaddir;
}
} elseif ($storage_type == 's3') {
$key = yourls_get_option('icc_upload_s3_key');
$secret = yourls_get_option('icc_upload_s3_secret');
$region = yourls_get_option('icc_upload_s3_region');
$bucket = yourls_get_option('icc_upload_s3_bucket');
$disable_acl = yourls_get_option('icc_upload_s3_disable_acl', false);
if (empty($key) || empty($secret) || empty($region) || empty($bucket))
return 'Plugin not configured for S3 storage.';
$s3 = icc_get_aws_client($key, $secret, $region);
if (!$s3)
return 'AWS SDK not found or failed to initialize, please ensure aws.phar is in the plugin folder.';
}
// Original name: from the finisher for chunked uploads, else from $_FILES.
$file_name_to_use = $local_file_path ? $original_filename : $_FILES['file_upload']['name'];
// Handle the filename's extension
$my_upload_extension = pathinfo($file_name_to_use, PATHINFO_EXTENSION);
// If there is any extension at all then append it with a leading dot
// NOTE(review): no extension whitelist is applied; with local storage a
// ".php" (or similar) upload lands in the web-served share directory —
// confirm this is acceptable for the deployment (admin-only uploader).
$my_extension = '';
if (isset($my_upload_extension) && $my_upload_extension != NULL) {
$my_extension = '.' . $my_upload_extension;
}
$my_upload_filename = pathinfo($file_name_to_use, PATHINFO_FILENAME);
$my_filename = $my_upload_filename; // Default
// Optional filename transformation chosen in the upload form.
if (isset($_POST['convert_filename'])) {
switch ($_POST['convert_filename']) {
case 'browser-safe': {
// make the filename web-safe:
// (the surrounding parentheses in these patterns are the PCRE
// delimiters, not capture groups — the body is the char class)
$my_filename_trim = trim($my_upload_filename);
$my_filename_trim = strtolower($my_filename_trim); // Force lowercase
$my_extension = strtolower($my_extension);
$my_RemoveChars = array("([^()_\-\.,0-9a-zA-Z\[\]])"); // replace what's NOT in here!
$my_filename = preg_replace($my_RemoveChars, "_", $my_filename_trim);
$my_filename = preg_replace("(_{2,})", "_", $my_filename);
$my_extension = preg_replace($my_RemoveChars, "_", $my_extension);
$my_extension = preg_replace("(_{2,})", "_", $my_extension);
}
break;
case 'safe_suffix': {
// browser-safe + random suffix
$my_filename_trim = trim($my_upload_filename);
$my_filename_trim = strtolower($my_filename_trim); // Force lowercase
$my_extension = strtolower($my_extension);
$my_RemoveChars = array("([^()_\-\.,0-9a-zA-Z\[\]])");
$my_filename = preg_replace($my_RemoveChars, "_", $my_filename_trim);
$my_filename = preg_replace("(_{2,})", "_", $my_filename);
$my_extension = preg_replace($my_RemoveChars, "_", $my_extension);
$my_extension = preg_replace("(_{2,})", "_", $my_extension);
// Suffix length is configurable; str_shuffle gives a non-crypto
// random alphanumeric tail to avoid collisions.
$suffix_length = yourls_get_option('icc_upload_suffix_length', 4);
$suffix = substr(str_shuffle('abcdefghijklmnopqrstuvwxyz0123456789'), 0, $suffix_length);
$my_filename .= '_' . $suffix;
}
break;
case 'randomized': {
// make up a random name for the uploaded file
// (md5 of original name + current unix time, truncated to 12 hex chars)
$my_filename = substr(md5($my_upload_filename . strtotime("now")), 0, 12);
}
break;
}
}
// avoid duplicate filenames
if ($storage_type == 'local') {
// Probe name.2.ext, name.3.ext, ... until an unused path is found.
$my_count = 2;
$my_path = $my_uploaddir . $my_filename . $my_extension;
$my_final_file_name = $my_filename . $my_extension;
while (file_exists($my_path)) {
$my_path = $my_uploaddir . $my_filename . '.' . $my_count . $my_extension;
$my_final_file_name = $my_filename . '.' . $my_count . $my_extension;
$my_count++;
}
} else {
// For S3, exact duplicate check is hard without API call, so we assume timestamp or suffix makes it unique enough
// Or we can just overwrite as S3 versioning might be on, but user asked for simple upload
// We will just use the name derived.
$my_final_file_name = $my_filename . $my_extension;
// If we are processing a chunked upload, source is the assembled file
// If it's a standard upload, it's the temp file
$my_path = $local_file_path ? $local_file_path : $_FILES['file_upload']['tmp_name'];
}
$my_upload_fullname = pathinfo($file_name_to_use, PATHINFO_BASENAME);
// Upload Logic
$upload_success = false;
if ($storage_type == 'local') {
// If local file path provided (Chunked), rename it to destination
if ($local_file_path) {
if (rename($local_file_path, $my_path)) {
$upload_success = true;
$final_url = $my_url . $my_final_file_name;
}
} else {
if (move_uploaded_file($_FILES['file_upload']['tmp_name'], $my_path)) {
$upload_success = true;
$final_url = $my_url . $my_final_file_name;
}
}
} elseif ($storage_type == 's3') {
try {
$args = [
'Bucket' => $bucket,
'Key' => $my_final_file_name,
'SourceFile' => $my_path,
];
// ACL is skipped for buckets with "Bucket Owner Enforced" / blocked
// public access (configured via the Disable ACLs checkbox).
if (!$disable_acl) {
$args['ACL'] = 'public-read';
}
$result = $s3->putObject($args);
// Cleanup temp file if it was a chunked upload
if ($local_file_path && file_exists($local_file_path)) {
unlink($local_file_path);
}
// Use S3 Object URL directly
$final_url = $result['ObjectURL'];
$upload_success = true;
} catch (Aws\S3\Exception\S3Exception $e) {
return '<font color="red">S3 Upload failed: ' . $e->getMessage() . '</font>';
}
}
if ($upload_success) {
// On success:
// obey custom shortname, if given:
$my_custom_shortname = '';
if (isset($_POST['custom_shortname']) && $_POST['custom_shortname'] != NULL) {
$my_custom_shortname = $_POST['custom_shortname'];
}
// change custom title, if given. Default is original filename, but if user provided one, use it:
$my_custom_title = $_POST['convert_filename'] . ': ' . $my_upload_fullname;
if (isset($_POST['custom_title']) && $_POST['custom_title'] != NULL) {
$my_custom_title = $_POST['custom_title'];
}
// let YOURLS create the link:
// (yourls_add_new_link returns an array; 'shorturl' holds the short link)
$my_short_url = yourls_add_new_link($final_url, $my_custom_shortname, $my_custom_title);
return '<font color="green">"' . $my_upload_fullname . '" successfully sent to ' . ($storage_type == 's3' ? 'S3' : 'Server') . '. Links:</font><br />' .
'Direct: <a href="' . $final_url . '" target="_blank">' . $final_url . '</a><br />' .
'Short: <a href="' . $my_short_url['shorturl'] . '" target="_blank">' . $my_short_url['shorturl'] . '</a>';
} else {
// NOTE(review): on a move/rename failure this reports the $_FILES error
// code, which can be 0 (UPLOAD_ERR_OK) even though the move failed —
// the message may read "error was 0"; confirm intended.
$error = isset($_FILES['file_upload']) ? $_FILES['file_upload']['error'] : 'Unknown error';
return '<font color="red">Upload failed, sorry! The error was ' . $error . '</font>';
}
}