commit 23ebc235256e82d7e2e4bcae5c2d91c01316f434 Author: Ivan Carlos Date: Wed Jan 21 20:28:24 2026 -0300 furst push diff --git a/.gitea/workflows/release_build.yml b/.gitea/workflows/release_build.yml new file mode 100644 index 0000000..88a7ec7 --- /dev/null +++ b/.gitea/workflows/release_build.yml @@ -0,0 +1,384 @@ +name: Build, Push, Publish + +on: + push: + branches: + - main + workflow_dispatch: + workflow_run: + workflows: ["Sync Repo"] + types: + - completed + +jobs: + release: + name: Build & Release + runs-on: ubuntu-latest + container: + image: catthehacker/ubuntu:act-latest + + permissions: + contents: write + packages: write + + steps: + - name: 📥 Checkout code with full history and tags + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Check for Dockerargs and Upstream Updates + id: check_upstream + run: | + if [ -f Dockerargs ]; then + echo "Dockerargs found. Checking upstream..." + echo "Dockerargs found. Checking upstream..." + # Parse repo info using awk to avoid git config restrictions on underscores in keys elsewhere in the file + REPO_URL=$(awk -F '=' '/\[repo\]/{flag=1; next} /\[/{flag=0} flag && /^url=/{print $2}' Dockerargs | tr -d ' \r\n') + REPO_BRANCH=$(awk -F '=' '/\[repo\]/{flag=1; next} /\[/{flag=0} flag && /^branch=/{print $2}' Dockerargs | tr -d ' \r\n') + if [ -z "$REPO_BRANCH" ]; then REPO_BRANCH="main"; fi + + # Fetch upstream SHA + if [ -n "$REPO_URL" ]; then + UPSTREAM_SHA=$(git ls-remote "$REPO_URL" "$REPO_BRANCH" | awk '{ print $1 }' | head -c 7) + echo "Upstream SHA: $UPSTREAM_SHA" + + if [ -f manifest.json ]; then + LOCAL_SHA=$(jq -r '.upstream_sha // empty' manifest.json) + else + LOCAL_SHA="" + fi + + if [ "$LOCAL_SHA" != "$UPSTREAM_SHA" ]; then + echo "Upstream changed ($LOCAL_SHA -> $UPSTREAM_SHA)." + echo "upstream_needs_update=true" >> "$GITHUB_OUTPUT" + echo "upstream_sha=$UPSTREAM_SHA" >> "$GITHUB_OUTPUT" + echo "repo_url=$REPO_URL" >> "$GITHUB_OUTPUT" + echo "repo_branch=$REPO_BRANCH" >> "$GITHUB_OUTPUT" + else + echo "Upstream up to date." + echo "upstream_needs_update=false" >> "$GITHUB_OUTPUT" + fi + + # Parse Build Args + echo "Parsing [args] from Dockerargs..." + ARGS_CONTENT=$(sed -n '/^\[args\]/,/^\[/p' Dockerargs | grep -v '^\[' | grep '=' || true) + if [ -n "$ARGS_CONTENT" ]; then + echo "Found args:" + echo "$ARGS_CONTENT" + echo "build_args<> "$GITHUB_OUTPUT" + echo "$ARGS_CONTENT" >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + else + echo "No args found." + echo "build_args=" >> "$GITHUB_OUTPUT" + fi + else + echo "Repo URL not found in Dockerargs." + echo "upstream_needs_update=false" >> "$GITHUB_OUTPUT" + echo "build_args=" >> "$GITHUB_OUTPUT" + fi + else + echo "No Dockerargs found." 
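+ # Illustrative note (not from the original commit): when a Dockerargs file is added,
+ # this step expects a small INI-style layout along these lines:
+ #   [repo]
+ #   url=https://git.example.com/owner/upstream.git
+ #   branch=main
+ #   [args]
+ #   SOME_BUILD_ARG=value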
+ echo "upstream_needs_update=false" >> "$GITHUB_OUTPUT" + echo "build_args=" >> "$GITHUB_OUTPUT" + fi + + - name: Check if any tags exist + id: check_tags_exist + run: | + git fetch --tags + TAG_COUNT=$(git tag | wc -l) + if [ "$TAG_COUNT" -eq 0 ]; then + echo "has_tags=false" >> "$GITHUB_OUTPUT" + echo "latest_tag=v0.0.0" >> "$GITHUB_OUTPUT" + else + echo "has_tags=true" >> "$GITHUB_OUTPUT" + LATEST_TAG=$(git describe --tags --abbrev=0) + echo "latest_tag=$LATEST_TAG" >> "$GITHUB_OUTPUT" + fi + + - name: Check if meaningful commits exist since latest tag + id: check_commits + run: | + UPSTREAM_UPDATE="${{ steps.check_upstream.outputs.upstream_needs_update }}" + if [ "$UPSTREAM_UPDATE" == "true" ]; then + echo "commit_count=1" >> "$GITHUB_OUTPUT" + echo "changed_files=Upstream Update to ${{ steps.check_upstream.outputs.upstream_sha }}" >> "$GITHUB_OUTPUT" + elif [ "${{ steps.check_tags_exist.outputs.has_tags }}" = "false" ]; then + # No tags exist, so we should create first release + echo "commit_count=1" >> "$GITHUB_OUTPUT" + CHANGED_FILES=$(git ls-files | grep -v '^manifest.json$' || true) + if [ -n "$CHANGED_FILES" ]; then + echo "changed_files<> "$GITHUB_OUTPUT" + printf '%s\n' "$CHANGED_FILES" >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + else + echo "changed_files=Initial release" >> "$GITHUB_OUTPUT" + fi + else + LATEST_TAG="${{ steps.check_tags_exist.outputs.latest_tag }}" + CHANGED_FILES="$(git diff --name-only "${LATEST_TAG}..HEAD" | grep -v '^manifest.json$' || true)" + if [ -n "$CHANGED_FILES" ]; then + echo "commit_count=1" >> "$GITHUB_OUTPUT" + echo "changed_files<> "$GITHUB_OUTPUT" + printf '%s\n' "$CHANGED_FILES" >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + else + echo "commit_count=0" >> "$GITHUB_OUTPUT" + fi + fi + + - name: Get latest release tag (from Gitea API) + id: get_latest_release + run: | + # Using Gitea API + LATEST_RELEASE_TAG=$(curl -sL -H "Accept: application/json" \ + -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ + "${{ gitea.api_url }}/repos/${{ gitea.repository }}/releases/latest" | jq -r .tag_name) + + if [ -z "$LATEST_RELEASE_TAG" ] || [ "$LATEST_RELEASE_TAG" = "null" ]; then + LATEST_RELEASE_TAG="v1.0.0" + fi + echo "latest_release_tag=$LATEST_RELEASE_TAG" >> "$GITHUB_OUTPUT" + echo "latest_release_version=${LATEST_RELEASE_TAG#v}" >> "$GITHUB_OUTPUT" + + # ------------------------------- + # Sync manifest.json to last release version if behind (only when no meaningful commits) + # ------------------------------- + - name: 🛠 Ensure manifest.json matches latest release version + if: steps.check_commits.outputs.commit_count == '0' + run: | + if [ -f manifest.json ]; then + MANIFEST_VERSION=$(jq -r '.version // empty' manifest.json) + else + MANIFEST_VERSION="" + fi + LATEST_RELEASE_VERSION="${{ steps.get_latest_release.outputs.latest_release_version }}" + PYTHON_CODE="from packaging import version; \ + print(version.parse('$LATEST_RELEASE_VERSION') > version.parse('$MANIFEST_VERSION') if '$MANIFEST_VERSION' else True)" + # Python3 is available in catthehacker/ubuntu:act-latest + NEED_UPDATE=$(python3 -c "$PYTHON_CODE") + if [ "$NEED_UPDATE" = "True" ]; then + echo "Updating manifest.json to version $LATEST_RELEASE_VERSION (sync with release)" + jq --arg v "$LATEST_RELEASE_VERSION" '.version = $v' manifest.json > tmp.json && mv tmp.json manifest.json + git config user.name "Gitea Actions" + git config user.email "actions@git.icc.gg" + git add manifest.json + git commit -m "Sync manifest.json to release $LATEST_RELEASE_VERSION 
[🔄]" || echo "Nothing to commit" + git push origin main || true + else + echo "Manifest.json is already up-to-date with the latest release." + fi + + # ------------------------------- + # Continue normal workflow if commits exist + # ------------------------------- + - name: 📃 Get list of changed files (Markdown bullet list) + if: steps.check_commits.outputs.commit_count != '0' + id: changed_files + run: | + BULLET_LIST="$(printf '%s\n' "${{ steps.check_commits.outputs.changed_files }}" | sed 's/^/- /')" + echo "CHANGED<> "$GITHUB_OUTPUT" + printf '%s\n' "$BULLET_LIST" >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + COUNT="$(printf '%s\n' "${{ steps.check_commits.outputs.changed_files }}" | wc -l)" + echo "COUNT=$COUNT" >> "$GITHUB_OUTPUT" + + - name: Get manifest version + if: steps.check_commits.outputs.commit_count != '0' + id: get_manifest_version + run: | + if [ -f manifest.json ]; then + MANIFEST_VERSION=$(jq -r '.version // empty' manifest.json) + if [ -z "$MANIFEST_VERSION" ] || [ "$MANIFEST_VERSION" = "null" ]; then + MANIFEST_VERSION="1.0.0" + fi + else + MANIFEST_VERSION="1.0.0" + fi + echo "manifest_version=$MANIFEST_VERSION" >> "$GITHUB_OUTPUT" + + - name: Pick base version + if: steps.check_commits.outputs.commit_count != '0' + id: pick_base_version + run: | + LATEST_RELEASE="${{ steps.get_latest_release.outputs.latest_release_version }}" + MANIFEST="${{ steps.get_manifest_version.outputs.manifest_version }}" + BASE_VERSION=$(python3 -c "from packaging import version; \ + print(str(max(version.parse('$LATEST_RELEASE'), version.parse('$MANIFEST'))))") + echo "base_version=$BASE_VERSION" >> "$GITHUB_OUTPUT" + + - name: 🔢 Determine version + if: steps.check_commits.outputs.commit_count != '0' + id: version + run: | + BASE_VERSION="${{ steps.pick_base_version.outputs.base_version }}" + MAJOR=$(echo "$BASE_VERSION" | cut -d. -f1) + MINOR=$(echo "$BASE_VERSION" | cut -d. -f2) + PATCH=$(echo "$BASE_VERSION" | cut -d. -f3) + COUNT="${{ steps.changed_files.outputs.COUNT }}" + if [ "$COUNT" -ge 5 ]; then + MAJOR=$((MAJOR + 1)) + MINOR=0 + PATCH=0 + elif [ "$COUNT" -ge 3 ]; then + MINOR=$((MINOR + 1)) + PATCH=0 + else + PATCH=$((PATCH + 1)) + fi + NEW_VERSION="${MAJOR}.${MINOR}.${PATCH}" + REPO_NAME="$(basename "$GITHUB_REPOSITORY")" + ZIP_NAME="${REPO_NAME}-${NEW_VERSION}.zip" + echo "VERSION=$NEW_VERSION" >> "$GITHUB_OUTPUT" + echo "ZIP_NAME=$ZIP_NAME" >> "$GITHUB_OUTPUT" + echo "REPO_NAME=$REPO_NAME" >> "$GITHUB_OUTPUT" + + - name: 🛠 Update or create manifest.json + if: steps.check_commits.outputs.commit_count != '0' + run: | + VERSION="${{ steps.version.outputs.VERSION }}" + AUTHOR="Ivan Carlos" + VERSION_FILE="manifest.json" + UPSTREAM_SHA="${{ steps.check_upstream.outputs.upstream_sha }}" + + if [ -f "$VERSION_FILE" ]; then + jq --arg v "$VERSION" \ + --arg a "$AUTHOR" \ + --arg u "$UPSTREAM_SHA" \ + '.version = $v | .author = $a | if $u != "" and $u != null then .upstream_sha = $u else . 
end' \ + "$VERSION_FILE" > tmp.json && mv tmp.json "$VERSION_FILE" + else + echo "{ \"version\": \"$VERSION\", \"author\": \"$AUTHOR\", \"upstream_sha\": \"$UPSTREAM_SHA\" }" > "$VERSION_FILE" + fi + + - name: 💾 Commit and push updated manifest.json + if: steps.check_commits.outputs.commit_count != '0' + run: | + git config user.name "Gitea Actions" + git config user.email "actions@git.icc.gg" + git add manifest.json + git commit -m "Update manifest version to ${{ steps.version.outputs.VERSION }} [▶️]" || echo "Nothing to commit" + git push origin main + + - name: 🛠 Install zip + if: steps.check_commits.outputs.commit_count != '0' + run: | + apt-get update && apt-get install -y zip + + - name: 📦 Create ZIP package (excluding certain files) + if: steps.check_commits.outputs.commit_count != '0' + run: | + ZIP_NAME="${{ steps.version.outputs.ZIP_NAME }}" + zip -r "$ZIP_NAME" . -x ".git/*" ".github/*" "docker/*" ".dockerignore" "CNAME" "Dockerfile" "README.md" "LICENSE" ".gitea/*" + + - name: 🚀 Create Gitea Release + if: steps.check_commits.outputs.commit_count != '0' + id: create_release + env: + CHANGELOG_LIST: ${{ steps.changed_files.outputs.CHANGED }} + run: | + TAG_NAME="v${{ steps.version.outputs.VERSION }}" + RELEASE_NAME="${{ steps.version.outputs.REPO_NAME }} v${{ steps.version.outputs.VERSION }}" + + # Construct Markdown body safely using env var + # We use printf to avoid interpreting backslashes in the file list + BODY=$(printf "### Changelog\nFiles changed in this release:\n%s" "$CHANGELOG_LIST") + + # Create JSON payload using jq + jq -n \ + --arg tag_name "$TAG_NAME" \ + --arg name "$RELEASE_NAME" \ + --arg body "$BODY" \ + '{tag_name: $tag_name, name: $name, body: $body, draft: false, prerelease: false}' > release_payload.json + + echo "DEBUG: Generated Payload:" + cat release_payload.json + + # Create Release + curl -s -X POST "${{ gitea.api_url }}/repos/${{ gitea.repository }}/releases" \ + -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ + -H "Content-Type: application/json" \ + -d @release_payload.json > api_response.json + + echo "DEBUG: API Response:" + cat api_response.json || true + + RELEASE_ID=$(jq -r .id api_response.json) + echo "RELEASE_ID=$RELEASE_ID" >> "$GITHUB_OUTPUT" + + if [ "$RELEASE_ID" == "null" ] || [ -z "$RELEASE_ID" ]; then + echo "Failed to create release. 
Response content:" + cat api_response.json + exit 1 + fi + + - name: 📤 Upload Release Asset + if: steps.check_commits.outputs.commit_count != '0' + run: | + RELEASE_ID="${{ steps.create_release.outputs.RELEASE_ID }}" + ZIP_NAME="${{ steps.version.outputs.ZIP_NAME }}" + FILE_PATH="./$ZIP_NAME" + + curl --fail -s -X POST "${{ gitea.api_url }}/repos/${{ gitea.repository }}/releases/$RELEASE_ID/assets?name=$ZIP_NAME" \ + -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ + -H "Content-Type: application/zip" \ + --data-binary @"$FILE_PATH" + + # ----- Docker steps ----- + - name: Clone Upstream Code (if needed) + if: steps.check_commits.outputs.commit_count != '0' && (steps.check_upstream.outputs.upstream_needs_update == 'true' || steps.check_upstream.outputs.repo_url != '') + run: | + rm -rf upstream_src + git clone --depth 1 --branch ${{ steps.check_upstream.outputs.repo_branch }} ${{ steps.check_upstream.outputs.repo_url }} upstream_src + + - name: 🔍 Check if Dockerfile exists + if: steps.check_commits.outputs.commit_count != '0' || steps.check_upstream.outputs.upstream_needs_update == 'true' + id: dockerfile_check + run: | + if [ -n "${{ steps.check_upstream.outputs.repo_url }}" ]; then + if [ -f upstream_src/Dockerfile ]; then + echo "exists=true" >> "$GITHUB_OUTPUT" + else + # Fallback or error? User said "ignore", but we need a dockerfile to build. + # Assuming if upstream_src is present, we trust it, or fail at build time. + # Let's say exists=true and let build fail if missing, per user hint. + echo "exists=true" >> "$GITHUB_OUTPUT" + fi + elif [ -f Dockerfile ]; then + echo "exists=true" >> "$GITHUB_OUTPUT" + else + echo "exists=false" >> "$GITHUB_OUTPUT" + fi + + - name: 🔐 Login to Gitea Container Registry + if: steps.check_commits.outputs.commit_count != '0' && steps.dockerfile_check.outputs.exists == 'true' + uses: docker/login-action@v3 + with: + registry: git.icc.gg + username: ${{ gitea.actor }} + password: ${{ secrets.CR_PAT }} + + - name: 🛠 Set up QEMU + if: steps.check_commits.outputs.commit_count != '0' && steps.dockerfile_check.outputs.exists == 'true' + uses: docker/setup-qemu-action@v3 + + - name: 🛠 Set up Docker Buildx + if: steps.check_commits.outputs.commit_count != '0' && steps.dockerfile_check.outputs.exists == 'true' + uses: docker/setup-buildx-action@v3 + + - name: 🐳 Build and Push Docker image + if: steps.check_commits.outputs.commit_count != '0' && steps.dockerfile_check.outputs.exists == 'true' + uses: docker/build-push-action@v5 + id: docker_build + with: + context: ${{ steps.check_upstream.outputs.repo_url != '' && './upstream_src' || '.' 
}} + platforms: linux/amd64,linux/arm64 + file: ${{ steps.check_upstream.outputs.repo_url != '' && './upstream_src/Dockerfile' || './Dockerfile' }} + push: true + build-args: | + ${{ steps.check_upstream.outputs.build_args }} + tags: | + git.icc.gg/${{ gitea.repository }}:latest + git.icc.gg/${{ gitea.repository }}:${{ steps.version.outputs.VERSION }} diff --git a/.gitea/workflows/update_readme.yml b/.gitea/workflows/update_readme.yml new file mode 100644 index 0000000..b1ccbaa --- /dev/null +++ b/.gitea/workflows/update_readme.yml @@ -0,0 +1,59 @@ +name: Update README + +permissions: + contents: write + +on: + workflow_dispatch: + schedule: + - cron: "0 4 * * *" # Every day at 4 AM UTC + +jobs: + update-readme: + runs-on: ubuntu-latest + container: + image: catthehacker/ubuntu:act-latest + + env: + SOURCE_REPO: ivancarlos/.gitea + SOURCE_BRANCH: main + + steps: + - name: Checkout current repository + uses: actions/checkout@v4 + + - name: Checkout source README template + uses: actions/checkout@v4 + with: + repository: ${{ env.SOURCE_REPO }} + ref: ${{ env.SOURCE_BRANCH }} + token: ${{ secrets.CR_PAT }} + path: source_readme + + - name: Update README.md (footer only) + run: | + set -e + + # --- Extract footer block from source (everything from onward) --- + FOOTER=$(awk '//{flag=1}flag' source_readme/README.md) + + # --- Replace everything after with FOOTER --- + awk -v footer="$FOOTER" ' + // { + print footer + found=1 + exit + } + { print } + ' README.md > README.tmp && mv README.tmp README.md + + - name: Remove source_readme from git index + run: rm -rf source_readme + + - name: Commit and push changes + run: | + git config user.name "Gitea Actions" + git config user.email "actions@git.icc.gg" + git add README.md + git commit -m "Sync README from template [▶️]" || echo "Nothing to commit" + git push origin ${{ github.ref_name }} diff --git a/README.md b/README.md new file mode 100755 index 0000000..c3358bc --- /dev/null +++ b/README.md @@ -0,0 +1,87 @@ +YOURLS Plugin: Upload and Shorten +================================= + +Plugin for [YOURLS](http://yourls.org) (version 1.7 or newer) + +Description +----------- +This plugin lets you upload a file to your webserver and automagically creates a YOURLS short-URL for it. Then you can share that file by its short link as well as its full URL. + +Features +-------- + * Different ways to change the filename during the upload + * Make a note about it for yourself in the YOURLS database (by default the title field is filled with the original filename and the alteration method) + * Keep track of views/downloads via YOURLS´s history function + * Localization support (currently: French, English, Spanish, German and Simplified Chinese. More translations provided by volounteers are greatly appreciated.) + +Requirements +------------ +What you need: + + * A webserver with PHP support + * A functional installation of [YOURLS](http://yourls.org) + * This Plugin ;-) + * A bit of understanding what it does and what you can do with it ;-) + +Installation +------------ + + * Navigate to the folder `./user/plugins/` inside your YOURLS-install directory + + * Use any of these two ways to install: + - **Either** clone this repo using `git` + - **or** create a new folder named ´Upload-and-Shorten´, then download all files from here *into that directory*. 
+ + * Prepare your configuration: + * If necessary create a directory where your files can be accessed from the webserver (i.e '/full/path/to/httpd/directory/') + * Depending on your webserver´s setup you may have to modify the permissions of that directory: + - Make sure your webserver has read+write permissions for it. Explaining that is beyond the scope of this readme, please refer to the manual of your server, operating system or hosting provider. On a Linux box something like + `chown :www-data /full/path/to/httpd/directory && chmod g+rwx /full/path/to/httpd/directory` + should do the trick, but please don't rely on it. + **A correct server configuration is important for its functionality, but essential for its safety!** + * Now open `./user/config.php` in your YOURLS-directory with any text editor and ... + - add these definition lines and save the file: + `# Paths for plugin: "Upload-and-Shorten":` + `# The web URL path where YOURLS short-links will redirect to:` + `define( 'SHARE_URL', 'http://my.domain.tld/directory/' );` + `# The physical path where the plugin drops your files into:` + `define( 'SHARE_DIR', '/full/path/to/httpd/directory/' );` + (Adjust paths to your needs...) + + * Go to the Plugins Administration Page (eg. `http://sho.rt/admin/plugins.php`) and activate the plugin. + + * Have fun! + + * Consider helping with translations. + +Bugs & Issues +------------- +No critical misbehaviour known, most issues are caused by configuration errors. +Beware of scripts and plugins which validate URLs or intercept the data flow. ~~Namely the plugin "Check URL" can interfere with this plugin,~~ (This issue has been fixed for basic setups, see [issue #11](https://github.com/fredl99/YOURLS-Upload-and-Shorten/issues/11).) However, there might still occur interferences with plugins which check target URLs or manipulate the database by themselves. So, when you notice a strange behaviour always think about this and if you report an issue please include a list of installed and activated plugins. + +Localization (l10n) +-------------------- +This plugin supports **localization** (translations into your language). +**For this to work you need at least YOURLS v1.7 from March 1, 2015**. It will basically work fine with earlier versions, except that translations won't work because of a minor bug in the YOURLS-code. Just upgrade to the latest YOURLS version and it will do. + +The default language is English. Translation files for French, German, Spanish and Simplified Chinese are included in the folder `l10n/`. To use this feature you just have to define your locale in `user/config.php` like this: +`define( 'YOURLS_LANG', 'de_DE' );` +(can be found within the standard YOURLS options there) + +Looking for translators +----------------------- +If you're willing to provide translations, please [read this](http://blog.yourls.org/2013/02/workshop-how-to-create-your-own-translation-file-for-yourls/). If necessary you can contact me for further instructions. Any help is appreciated, at most by your fellow countrymen! + +Donations +--------- +There are many ways to integrate this plugin into your daily routines. The more you use it the more you will discover. The more you discover the more you will like it. +If you do, remember someone spends his time for improving it. If you want say thanks for that, just [buy him a coffee](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=H5B9UKVYP88X4). That will certainly motivate him to make further enhancements. Just for You! ... 
+![](https://s.fredls.net/wjotnlsc1igvzq) and him :) + +License +------- +**Free for personal use only.** +If you want to make money with it you have to contact me first. + +Thanks for your attention. + diff --git a/aws.phar b/aws.phar new file mode 100755 index 0000000..9bf8efc Binary files /dev/null and b/aws.phar differ diff --git a/plugin.php b/plugin.php new file mode 100755 index 0000000..69aa46f --- /dev/null +++ b/plugin.php @@ -0,0 +1,964 @@ + 'error', 'message' => 'Authentication failed']); + die(); + } + + if ($_POST['action'] == 'icc_upload_chunk') { + icc_upload_and_shorten_handle_chunk(); + } + if ($_POST['action'] == 'icc_upload_finish') { + icc_upload_and_shorten_handle_finish(); + } + } +} + +function icc_upload_and_shorten_handle_chunk() +{ + $nonce = $_POST['nonce'] ?? ''; + if (!yourls_verify_nonce('icc_upload_chunk', $nonce)) { + echo json_encode(['status' => 'error', 'message' => 'Security check failed']); + die(); + } + + if (!isset($_FILES['file_chunk']) || $_FILES['file_chunk']['error'] != UPLOAD_ERR_OK) { + echo json_encode(['status' => 'error', 'message' => 'Upload error']); + die(); + } + + $upload_id = preg_replace('/[^a-zA-Z0-9_]/', '', $_POST['upload_id']); + $temp_dir = yourls_get_option('icc_upload_share_dir'); + if (!$temp_dir) + $temp_dir = sys_get_temp_dir(); + + // Create a temp directory for this upload + $target_dir = rtrim($temp_dir, '/') . '/icc_temp_' . $upload_id; + if (!is_dir($target_dir)) + mkdir($target_dir, 0755, true); + + $chunk_index = intval($_POST['chunk_index']); + $target_file = $target_dir . '/part_' . $chunk_index; + + if (move_uploaded_file($_FILES['file_chunk']['tmp_name'], $target_file)) { + echo json_encode(['status' => 'success']); + } else { + echo json_encode(['status' => 'error', 'message' => 'Failed to move chunk']); + } + die(); +} + +function icc_upload_and_shorten_handle_finish() +{ + $nonce = $_POST['nonce'] ?? ''; + if (!yourls_verify_nonce('icc_upload_chunk', $nonce)) { + echo json_encode(['status' => 'error', 'message' => 'Security check failed']); + die(); + } + + $upload_id = preg_replace('/[^a-zA-Z0-9_]/', '', $_POST['upload_id']); + $file_name = $_POST['file_name']; + $temp_dir = yourls_get_option('icc_upload_share_dir'); + if (!$temp_dir) + $temp_dir = sys_get_temp_dir(); + + $target_dir = rtrim($temp_dir, '/') . '/icc_temp_' . $upload_id; + $final_file_path = $target_dir . '/' . $file_name; + + // Assemble chunks + if ($fp = fopen($final_file_path, 'wb')) { + $chunks = glob($target_dir . 
'/part_*'); + natsort($chunks); + foreach ($chunks as $chunk) { + $chunk_content = file_get_contents($chunk); + fwrite($fp, $chunk_content); + unlink($chunk); + } + fclose($fp); + rmdir($target_dir); // Remove temp dir + + // now process the file + // Pass essential POST data for filename conversion if needed + + $result = icc_upload_and_shorten_process_upload($final_file_path, $file_name); + + // Since the result is HTML string, we might want to return it or parse it + // But for this AJAX response we return it in message + echo json_encode(['status' => 'success', 'message' => $result]); + } else { + echo json_encode(['status' => 'error', 'message' => 'Failed to assemble file']); + } + die(); +} + +// Display admin page +function icc_upload_and_shorten_do_page() +{ + // Check if a form was submitted + if (isset($_POST['action']) && $_POST['action'] == 'icc_upload_and_shorten_save') { + icc_upload_and_shorten_update_settings(); + } + + // Handle Deletion + if (isset($_POST['action']) && $_POST['action'] == 'delete_local_file' && isset($_POST['file_name'])) { + $nonce = $_POST['nonce'] ?? ''; + if (yourls_verify_nonce('icc_delete_local_file', $nonce)) { + $share_dir = yourls_get_option('icc_upload_share_dir'); + $file_name = $_POST['file_name']; + // Validating filename to prevent directory traversal + if (basename($file_name) == $file_name) { + $file_path = rtrim($share_dir, '/') . '/' . $file_name; + if (file_exists($file_path)) { + if (unlink($file_path)) { + echo "
File deleted successfully: " . htmlspecialchars($file_name) . "
"; + } else { + echo "
Failed to delete file. Check permissions.
"; + } + } else { + echo "
File not found.
"; + } + } else { + echo "
Invalid filename.
"; + } + } else { + echo "
Security check failed.
"; + } + } + + if (isset($_POST['action']) && $_POST['action'] == 'delete_file' && isset($_POST['file_key'])) { + $nonce = $_POST['nonce'] ?? ''; + if (yourls_verify_nonce('icc_delete_file', $nonce)) { + try { + $s3_key = yourls_get_option('icc_upload_s3_key'); + $s3_secret = yourls_get_option('icc_upload_s3_secret'); + $s3_region = yourls_get_option('icc_upload_s3_region'); + $s3_bucket = yourls_get_option('icc_upload_s3_bucket'); + + $s3 = icc_get_aws_client($s3_key, $s3_secret, $s3_region); + if ($s3) { + $s3->deleteObject([ + 'Bucket' => $s3_bucket, + 'Key' => $_POST['file_key'] + ]); + echo "
File deleted successfully: " . htmlspecialchars($_POST['file_key']) . "
"; + } else { + echo "
Failed to initialize S3 client for deletion.
"; + } + } catch (Aws\S3\Exception\S3Exception $e) { + echo "
Failed to delete file: " . $e->getMessage() . "
"; + } + } else { + echo "
Security check failed (Invalid Nonce).
"; + } + } + + $message = ''; + if (isset($_POST['submit']) && $_POST['submit'] == 'Upload') + $message = icc_upload_and_shorten_process_upload(); + + $storage_type = yourls_get_option('icc_upload_storage_type', 'local'); + $share_url = yourls_get_option('icc_upload_share_url'); + $share_dir = yourls_get_option('icc_upload_share_dir'); + $suffix_length = yourls_get_option('icc_upload_suffix_length', 4); + + // S3 Config + $s3_key = yourls_get_option('icc_upload_s3_key'); + $s3_secret = yourls_get_option('icc_upload_s3_secret'); + $s3_region = yourls_get_option('icc_upload_s3_region'); + $s3_bucket = yourls_get_option('icc_upload_s3_bucket'); + $s3_disable_acl = yourls_get_option('icc_upload_s3_disable_acl', false); + + // input form + echo ' +

Upload & Shorten

+

Send a file to ' . ($storage_type == 's3' ? 'AWS S3' : 'your webserver') . ' and create a short-URL for it.

'; + + // Limits Diagnostics + $max_upload = ini_get('upload_max_filesize'); + $max_post = ini_get('post_max_size'); + echo "

Server Limits: Upload Max Filesize: $max_upload, Post Max Size: $max_post.
The Smart Uploader bypasses these limits by splitting files into chunks!

"; + + if (!empty($message)) { + echo "

$message

"; + } + + if ( + ($storage_type == 'local' && (empty($share_url) || empty($share_dir))) || + ($storage_type == 's3' && (empty($s3_key) || empty($s3_secret) || empty($s3_region) || empty($s3_bucket))) + ) { + echo '

Please configure the plugin below before using it.

'; + } + + $chunk_nonce = yourls_create_nonce('icc_upload_chunk'); + + echo ' +
+ + + +
Select a file +

+ + +

+ +
'; + + // YOURLS options + echo ' +
YOURLS database options + +

+ + + +

+
'; + + // filename handling + echo ' +
Filename conversions (optional) + +

+ + (Recommended if the file should be accessed by web-browsers.)
+ Ex.: "my not safe&clean filename #1.txt" -> https://example.com/my_not_safe_clean_filename_1.txt

+ +

+ + (Adds a random alphanumeric suffix to the filename.)
+ Ex.: "file.txt" -> https://example.com/file_a1b2.txt

+ +

+ + (Browser-safe filenames with slight protection against systematic crawling of your web-directory.)
+ Ex.: "mypicture.jpg" -> https://example.com/9a3e97434689.jpg

+ +
'; + + // do it! + echo ' +

+
'; + + // JS for Chunked Upload + echo ' + + '; + + // File Manager + if ($storage_type == 's3' && !empty($s3_key) && !empty($s3_secret) && !empty($s3_bucket)) { + icc_upload_and_shorten_file_manager($s3_key, $s3_secret, $s3_region, $s3_bucket); + } elseif ($storage_type == 'local' && !empty($share_dir)) { + icc_upload_and_shorten_local_file_manager($share_dir, $share_url); + } + + // Configuration Section + $nonce = yourls_create_nonce('icc_upload_and_shorten_settings'); + echo ' +
+

Configuration

+
+ + + +

+
+ +

+ +

Local Server Settings

+

+
+ Example: https://example.com/file/
+ +

+

+
+ Example: /home/username/htdocs/example.com/file/ (Directory must exist)
+ +

+ +

AWS S3 Settings

+

+
+ +

+

+
+ +

+

+
+ +

+

+
+ +

+

+ + +

+ +

General Settings

+

+
+ +

+ +

+
+ '; + + // footer + echo ' + +
+

Ivan Carlos » +Buy Me a Coffee

'; +} + +function icc_upload_and_shorten_update_settings() +{ + yourls_verify_nonce('icc_upload_and_shorten_settings', $_REQUEST['nonce']); + + if (isset($_POST['icc_upload_storage_type'])) + yourls_update_option('icc_upload_storage_type', $_POST['icc_upload_storage_type']); + + if (isset($_POST['icc_upload_share_url'])) + yourls_update_option('icc_upload_share_url', rtrim($_POST['icc_upload_share_url'], '/') . '/'); + + if (isset($_POST['icc_upload_share_dir'])) + yourls_update_option('icc_upload_share_dir', rtrim($_POST['icc_upload_share_dir'], '/') . '/'); + + if (isset($_POST['icc_upload_s3_key'])) + yourls_update_option('icc_upload_s3_key', trim($_POST['icc_upload_s3_key'])); + if (isset($_POST['icc_upload_s3_secret'])) + yourls_update_option('icc_upload_s3_secret', trim($_POST['icc_upload_s3_secret'])); + if (isset($_POST['icc_upload_s3_region'])) + yourls_update_option('icc_upload_s3_region', trim($_POST['icc_upload_s3_region'])); + if (isset($_POST['icc_upload_s3_bucket'])) + yourls_update_option('icc_upload_s3_bucket', trim($_POST['icc_upload_s3_bucket'])); + + if (isset($_POST['icc_upload_s3_disable_acl'])) { + yourls_update_option('icc_upload_s3_disable_acl', true); + } else { + yourls_update_option('icc_upload_s3_disable_acl', false); + } + + if (isset($_POST['icc_upload_suffix_length'])) + yourls_update_option('icc_upload_suffix_length', intval($_POST['icc_upload_suffix_length'])); + + echo "
Settings saved
"; +} + +// Local File Manager Function +function icc_upload_and_shorten_local_file_manager($dir, $url) +{ + echo '
'; + echo '

Local File Manager

'; + + if (!is_dir($dir)) { + echo '

Directory not found: ' . htmlspecialchars($dir) . '

'; + return; + } + + $raw_files = scandir($dir); + $files = []; + foreach ($raw_files as $f) { + if ($f == '.' || $f == '..') + continue; + $full_path = rtrim($dir, '/') . '/' . $f; + // Exclude directories (like the temp ones if they exist) + if (!is_dir($full_path)) { + $files[] = $f; + } + } + + // Sort by modification time (Newest first) + usort($files, function ($a, $b) use ($dir) { + return filemtime(rtrim($dir, '/') . '/' . $b) - filemtime(rtrim($dir, '/') . '/' . $a); + }); + + // $files = array_values($files); // Already indexed 0..n by sorting + + // Pagination + $per_page = 20; + $total_files = count($files); + $total_pages = ceil($total_files / $per_page); + $current_page = isset($_GET['local_page']) ? max(1, intval($_GET['local_page'])) : 1; + $offset = ($current_page - 1) * $per_page; + + $page_files = array_slice($files, $offset, $per_page); + + if (empty($page_files)) { + echo '

No files found.

'; + } else { + $nonce = yourls_create_nonce('icc_delete_local_file'); + echo ''; + echo ''; + echo ''; + + foreach ($page_files as $file) { + $filepath = rtrim($dir, '/') . '/' . $file; + $size = file_exists($filepath) ? round(filesize($filepath) / 1024, 2) . ' KB' : 'N/A'; + $date = file_exists($filepath) ? date("Y-m-d H:i:s", filemtime($filepath)) : 'N/A'; + $file_url = rtrim($url, '/') . '/' . $file; + + echo ''; + echo ''; + echo ''; + echo ''; + echo ''; + echo ''; + } + echo ''; + echo '
File Name | Size | Last Modified | Action
' . htmlspecialchars($file) . '' . $size . '' . $date . ''; + echo '
'; + echo ''; + echo ''; + echo ''; + echo ''; + echo '
'; + echo '
'; + + // Pagination Controls + if ($total_pages > 1) { + echo '
'; + echo '' . $total_files . ' items'; + $url = 'plugins.php?page=icc_upload_and_shorten'; + + if ($current_page > 1) { + echo '« First '; + echo '‹ Previous '; + } + + echo ' Page ' . $current_page . ' of ' . $total_pages . ' '; + + if ($current_page < $total_pages) { + echo ' Next › '; + echo 'Last »'; + } + echo '
'; + } + } + + // Recursive directory removal + function icc_rrmdir($dir) + { + if (is_dir($dir)) { + $objects = scandir($dir); + foreach ($objects as $object) { + if ($object != "." && $object != "..") { + if (is_dir($dir . "/" . $object) && !is_link($dir . "/" . $object)) + icc_rrmdir($dir . "/" . $object); + else + unlink($dir . "/" . $object); + } + } + rmdir($dir); + } + } + + // Cleanup Temp Folders + function icc_upload_and_shorten_cleanup_temp() + { + $temp_dir = yourls_get_option('icc_upload_share_dir'); + if (!$temp_dir) + $temp_dir = sys_get_temp_dir(); + + if (!is_dir($temp_dir)) + return; + + // Scan for icc_temp_* directories + $files = scandir($temp_dir); + foreach ($files as $file) { + if ($file == '.' || $file == '..') + continue; + + $path = rtrim($temp_dir, '/') . '/' . $file; + if (is_dir($path) && strpos($file, 'icc_temp_') === 0) { + // Check age (1 hour = 3600 seconds) + if (filemtime($path) < (time() - 3600)) { + icc_rrmdir($path); + } + } + } + } +} + +// Check for AWS SDK +function icc_get_aws_client($key, $secret, $region) +{ + if (!file_exists(dirname(__FILE__) . '/aws.phar')) { + return false; + } + require_once dirname(__FILE__) . '/aws.phar'; + + try { + $s3 = new Aws\S3\S3Client([ + 'version' => 'latest', + 'region' => $region, + 'credentials' => [ + 'key' => $key, + 'secret' => $secret, + ], + ]); + return $s3; + } catch (Exception $e) { + return false; + } +} + +// S3 File Manager Function +function icc_upload_and_shorten_file_manager($key, $secret, $region, $bucket) +{ + echo '
'; + echo '

S3 File Manager

'; + + $s3 = icc_get_aws_client($key, $secret, $region); + if (!$s3) { + echo '

Failed to initialize AWS Client.

'; + return; + } + + // Pagination + $continuation_token = isset($_GET['s3_next_token']) ? $_GET['s3_next_token'] : null; + + try { + $params = [ + 'Bucket' => $bucket, + 'MaxKeys' => 20 + ]; + + if ($continuation_token) { + $params['ContinuationToken'] = $continuation_token; + } + + $objects = $s3->listObjectsV2($params); + + if (!isset($objects['Contents']) || empty($objects['Contents'])) { + echo '

No files found in bucket.

'; + if ($continuation_token) { + echo '

Start Over

'; + } + } else { + $nonce = yourls_create_nonce('icc_delete_file'); + echo ''; + echo ''; + echo ''; + foreach ($objects['Contents'] as $object) { + // Construct the file URL (Path-style S3 URL format) + $file_url = "https://s3.{$region}.amazonaws.com/{$bucket}/" . $object['Key']; + + echo ''; + echo ''; + echo ''; + echo ''; + echo ''; + echo ''; + } + echo ''; + echo '
File Name | Size | Last Modified | Action
' . htmlspecialchars($object['Key']) . '' . round($object['Size'] / 1024, 2) . ' KB' . $object['LastModified'] . ''; + echo '
'; + echo ''; + echo ''; + echo ''; + echo ''; + echo '
'; + echo '
'; + echo '

Showing files from the S3 bucket.

'; + + // Pagination History (to allow 'Previous') + $history_raw = isset($_GET['s3_history']) ? $_GET['s3_history'] : ''; + $history = $history_raw ? explode(',', $history_raw) : []; + + // Pagination Controls + echo '
'; + $url_base = 'plugins.php?page=icc_upload_and_shorten'; + + // First Page + if ($continuation_token) { + echo '« First '; + } + + // Previous Page + if (!empty($history)) { + $prev_token = array_pop($history); + $prev_history = implode(',', $history); + $prev_url = $url_base; + if ($prev_token && $prev_token !== '__TOP__') { + $prev_url .= '&s3_next_token=' . urlencode($prev_token); + } + if ($prev_history) { + $prev_url .= '&s3_history=' . urlencode($prev_history); + } + echo '‹ Previous '; + } + + // Next Page + if (isset($objects['NextContinuationToken'])) { + $next_token = $objects['NextContinuationToken']; + // Append current token to history + $current_history = $history_raw; + $token_to_add = $continuation_token ? $continuation_token : '__TOP__'; + if ($current_history) { + $current_history .= ',' . $token_to_add; + } else { + $current_history = $token_to_add; + } + echo 'Next ›'; + } + echo '
'; + } + } catch (Aws\S3\Exception\S3Exception $e) { + echo '

Error listing files: ' . $e->getMessage() . '

'; + } +} + +// Update option in database +function icc_upload_and_shorten_process_upload($local_file_path = null, $original_filename = null) +{ + // If not coming from chunked upload, standard validations + if (!$local_file_path) { + // did the user select any file? + if ($_FILES['file_upload']['error'] == UPLOAD_ERR_NO_FILE) { + return 'You need to select a file to upload.'; + } + } + + // Increase limits for processing large files + set_time_limit(0); + + $storage_type = yourls_get_option('icc_upload_storage_type', 'local'); + + // Check Config + if ($storage_type == 'local') { + $my_url = yourls_get_option('icc_upload_share_url'); + $my_uploaddir = yourls_get_option('icc_upload_share_dir'); + if (empty($my_url) || empty($my_uploaddir)) + return 'Plugin not configured for local storage.'; + + // Check if directory exists and is writable + if (!is_dir($my_uploaddir) || !is_writable($my_uploaddir)) { + return 'Upload directory does not exist or is not writable: ' . $my_uploaddir; + } + } elseif ($storage_type == 's3') { + $key = yourls_get_option('icc_upload_s3_key'); + $secret = yourls_get_option('icc_upload_s3_secret'); + $region = yourls_get_option('icc_upload_s3_region'); + $bucket = yourls_get_option('icc_upload_s3_bucket'); + $disable_acl = yourls_get_option('icc_upload_s3_disable_acl', false); + if (empty($key) || empty($secret) || empty($region) || empty($bucket)) + return 'Plugin not configured for S3 storage.'; + + $s3 = icc_get_aws_client($key, $secret, $region); + if (!$s3) + return 'AWS SDK not found or failed to initialize, please ensure aws.phar is in the plugin folder.'; + } + + $file_name_to_use = $local_file_path ? $original_filename : $_FILES['file_upload']['name']; + + // Handle the filename's extension + $my_upload_extension = pathinfo($file_name_to_use, PATHINFO_EXTENSION); + + // If there is any extension at all then append it with a leading dot + $my_extension = ''; + if (isset($my_upload_extension) && $my_upload_extension != NULL) { + $my_extension = '.' . $my_upload_extension; + } + + $my_upload_filename = pathinfo($file_name_to_use, PATHINFO_FILENAME); + $my_filename = $my_upload_filename; // Default + + if (isset($_POST['convert_filename'])) { + switch ($_POST['convert_filename']) { + case 'browser-safe': { + // make the filename web-safe: + $my_filename_trim = trim($my_upload_filename); + $my_filename_trim = strtolower($my_filename_trim); // Force lowercase + $my_extension = strtolower($my_extension); + $my_RemoveChars = array("([^()_\-\.,0-9a-zA-Z\[\]])"); // replace what's NOT in here! 
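+ // Illustrative example (not part of the original source): "my not safe&clean filename #1.txt"
+ // becomes "my_not_safe_clean_filename_1.txt"; every character outside ()_-.,0-9a-zA-Z[]
+ // is replaced with "_" below, and runs of consecutive "_" are then collapsed to a single one.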
+ $my_filename = preg_replace($my_RemoveChars, "_", $my_filename_trim); + $my_filename = preg_replace("(_{2,})", "_", $my_filename); + $my_extension = preg_replace($my_RemoveChars, "_", $my_extension); + $my_extension = preg_replace("(_{2,})", "_", $my_extension); + } + break; + + case 'safe_suffix': { + // browser-safe + random suffix + $my_filename_trim = trim($my_upload_filename); + $my_filename_trim = strtolower($my_filename_trim); // Force lowercase + $my_extension = strtolower($my_extension); + $my_RemoveChars = array("([^()_\-\.,0-9a-zA-Z\[\]])"); + $my_filename = preg_replace($my_RemoveChars, "_", $my_filename_trim); + $my_filename = preg_replace("(_{2,})", "_", $my_filename); + $my_extension = preg_replace($my_RemoveChars, "_", $my_extension); + $my_extension = preg_replace("(_{2,})", "_", $my_extension); + + $suffix_length = yourls_get_option('icc_upload_suffix_length', 4); + $suffix = substr(str_shuffle('abcdefghijklmnopqrstuvwxyz0123456789'), 0, $suffix_length); + $my_filename .= '_' . $suffix; + } + break; + + case 'randomized': { + // make up a random name for the uploaded file + $my_filename = substr(md5($my_upload_filename . strtotime("now")), 0, 12); + } + break; + } + } + + // avoid duplicate filenames + if ($storage_type == 'local') { + $my_count = 2; + $my_path = $my_uploaddir . $my_filename . $my_extension; + $my_final_file_name = $my_filename . $my_extension; + + while (file_exists($my_path)) { + $my_path = $my_uploaddir . $my_filename . '.' . $my_count . $my_extension; + $my_final_file_name = $my_filename . '.' . $my_count . $my_extension; + $my_count++; + } + } else { + // For S3, exact duplicate check is hard without API call, so we assume timestamp or suffix makes it unique enough + // Or we can just overwrite as S3 versioning might be on, but user asked for simple upload + // We will just use the name derived. + $my_final_file_name = $my_filename . $my_extension; + + // If we are processing a chunked upload, source is the assembled file + // If it's a standard upload, it's the temp file + $my_path = $local_file_path ? $local_file_path : $_FILES['file_upload']['tmp_name']; + } + + $my_upload_fullname = pathinfo($file_name_to_use, PATHINFO_BASENAME); + + // Upload Logic + $upload_success = false; + + if ($storage_type == 'local') { + // If local file path provided (Chunked), rename it to destination + if ($local_file_path) { + if (rename($local_file_path, $my_path)) { + $upload_success = true; + $final_url = $my_url . $my_final_file_name; + } + } else { + if (move_uploaded_file($_FILES['file_upload']['tmp_name'], $my_path)) { + $upload_success = true; + $final_url = $my_url . $my_final_file_name; + } + } + } elseif ($storage_type == 's3') { + try { + $args = [ + 'Bucket' => $bucket, + 'Key' => $my_final_file_name, + 'SourceFile' => $my_path, + ]; + + if (!$disable_acl) { + $args['ACL'] = 'public-read'; + } + + $result = $s3->putObject($args); + + // Cleanup temp file if it was a chunked upload + if ($local_file_path && file_exists($local_file_path)) { + unlink($local_file_path); + } + + // Use S3 Object URL directly + $final_url = $result['ObjectURL']; + $upload_success = true; + } catch (Aws\S3\Exception\S3Exception $e) { + return 'S3 Upload failed: ' . $e->getMessage() . ''; + } + } + + if ($upload_success) { + // On success: + // obey custom shortname, if given: + $my_custom_shortname = ''; + if (isset($_POST['custom_shortname']) && $_POST['custom_shortname'] != NULL) { + $my_custom_shortname = $_POST['custom_shortname']; + } + // change custom title, if given. 
Default is original filename, but if user provided one, use it: + $my_custom_title = $_POST['convert_filename'] . ': ' . $my_upload_fullname; + if (isset($_POST['custom_title']) && $_POST['custom_title'] != NULL) { + $my_custom_title = $_POST['custom_title']; + } + + // let YOURLS create the link: + $my_short_url = yourls_add_new_link($final_url, $my_custom_shortname, $my_custom_title); + + return '"' . $my_upload_fullname . '" successfully sent to ' . ($storage_type == 's3' ? 'S3' : 'Server') . '. Links:
' . + 'Direct: ' . $final_url . '
' . + 'Short: ' . $my_short_url['shorturl'] . ''; + } else { + $error = isset($_FILES['file_upload']) ? $_FILES['file_upload']['error'] : 'Unknown error'; + return 'Upload failed, sorry! The error was ' . $error . ''; + } +}
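+ // Illustrative sketch of the client protocol (not part of the original upload script):
+ // for each slice of the file the admin page POSTs action=icc_upload_chunk with upload_id,
+ // chunk_index, the shared nonce and the file_chunk blob; once every part is accepted it
+ // POSTs action=icc_upload_finish with the same upload_id plus file_name, and the handler
+ // above reassembles the parts and runs icc_upload_and_shorten_process_upload() on the result.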