Migrate server to ESM

Sorry for the very big commit that may lead to git log issues and merge
conflicts, but it's a major step forward:

 * Server can be faster at startup because imports() are async and we can
   easily lazy import big modules
 * Angular doesn't seem to support ES import (with .js extension), so we
   had to correctly organize peertube into a monorepo:
    * Use yarn workspace feature
    * Use typescript reference projects for dependencies
    * Shared projects have been moved into "packages", each one is now a
      node module (with a dedicated package.json/tsconfig.json)
    * server/tools have been moved into apps/ and is now a dedicated app
      bundled and published on NPM so users don't have to build peertube
      cli tools manually
    * server/tests have been moved into packages/ so we don't compile
      them every time we want to run the server
 * Use isolatedModules option:
   * Had to move from const enum to const
     (https://www.typescriptlang.org/docs/handbook/enums.html#objects-vs-enums)
  * Had to explicitly specify "type" imports when used in decorators
 * Prefer tsx (that uses esbuild under the hood) instead of ts-node to
   load typescript files (tests with mocha or scripts):
     * To reduce test complexity as esbuild doesn't support decorator
       metadata, we only test server files that do not import server
       models
     * We still build tests files into js files for a faster CI
 * Remove unmaintained peertube CLI import script
 * Removed some barrels to speed up execution (less imports)
This commit is contained in:
Chocobozzz
2023-07-31 14:34:36 +02:00
parent 04d1da5621
commit 3a4992633e
2196 changed files with 12690 additions and 11574 deletions

View File

@@ -1,8 +1,14 @@
import autocannon, { printResult } from 'autocannon'
import { program } from 'commander'
import { writeJson } from 'fs-extra'
import { Video, VideoPrivacy } from '@shared/models'
import { createMultipleServers, doubleFollow, killallServers, PeerTubeServer, setAccessTokensToServers } from '@shared/server-commands'
import { writeJson } from 'fs-extra/esm'
import { Video, VideoPrivacy } from '@peertube/peertube-models'
import {
createMultipleServers,
doubleFollow,
killallServers,
PeerTubeServer,
setAccessTokensToServers
} from '@peertube/peertube-server-commands'
let servers: PeerTubeServer[]
// First server

View File

@@ -0,0 +1,12 @@
#!/bin/bash
# Build the peertube-cli app for publication.
set -eu
cd ./apps/peertube-cli
rm -rf ./dist
# tsc -b type-checks/builds the project references; its dist/ output is
# discarded just below, the shipped bundle comes from scripts/build.js
../../node_modules/.bin/tsc -b --verbose
rm -rf ./dist
mkdir ./dist
# Produce the final dist/ output with the app's own build script
node ./scripts/build.js

View File

@@ -2,12 +2,11 @@
set -eu
cd ./packages/peertube-runner
cd ./apps/peertube-runner
rm -rf ./dist
../../node_modules/.bin/tsc -b --verbose
rm -rf ./dist
mkdir ./dist
./node_modules/.bin/esbuild ./peertube-runner.ts --bundle --platform=node --target=node16 --external:"./lib-cov/fluent-ffmpeg" --external:pg-hstore --outfile=dist/peertube-runner.js
node ./scripts/build.js

View File

@@ -2,10 +2,11 @@
set -eu
rm -rf ./dist
rm -rf ./dist ./packages/*/dist
npm run tsc -- -b --verbose
npm run tsc -- -b --verbose server/tsconfig.json
npm run resolve-tspaths:server
cp -r "./server/static" "./server/assets" "./dist/server"
cp -r "./server/lib/emails" "./dist/server/lib"
cp -r "./server/server/static" "./server/server/assets" ./dist/server
cp -r "./server/server/lib/emails" "./dist/server/lib"
cp "./server/scripts/upgrade.sh" "./dist/scripts"

9
scripts/build/tests.sh Executable file
View File

@@ -0,0 +1,9 @@
#!/bin/bash
# Compile the tests package to JS ahead of time (faster CI runs).
set -eu
rm -rf ./packages/tests/dist
npm run tsc -- -b --verbose ./packages/tests/tsconfig.json
# Resolve TS path aliases to relative paths in the emitted files
npm run resolve-tspaths:server-lib
npm run resolve-tspaths:tests

View File

@@ -10,7 +10,7 @@ fi
retries=3
speedFactor="${2:-1}"
runTest () {
runJSTest () {
jobname=$1
shift
@@ -24,7 +24,7 @@ runTest () {
joblog="$jobname-ci.log"
parallel -j $jobs --retries $retries \
"echo Trying {} >> $joblog; npm run mocha -- -c --timeout 30000 --exit --bail {}" \
"echo Trying {} >> $joblog; npm run mocha -- --timeout 30000 --no-config -c --exit --bail {}" \
::: $files
cat "$joblog" | sort | uniq -c
@@ -32,92 +32,116 @@ runTest () {
}
findTestFiles () {
exception="-not -name index.js"
exception="-not -name index.js -not -name index.ts -not -name *.d.ts"
if [ ! -z ${2+x} ]; then
exception="$exception -not -name $2"
fi
find $1 -type f -name "*.js" $exception | xargs echo
find $1 -type f \( -name "*.js" -o -name "*.ts" \) $exception | xargs echo
}
if [ "$1" = "types-package" ]; then
npm run generate-types-package 0.0.0
npm run tsc -- --noEmit --esModuleInterop packages/types/tests/test.ts
# Test in an independent directory
rm -fr /tmp/types-generator
mkdir -p /tmp/types-generator
cp -r packages/types-generator/tests /tmp/types-generator/tests
cp -r packages/types-generator/dist /tmp/types-generator/dist
(cd /tmp/types-generator/dist && npm install)
npm run tsc -- --noEmit --esModuleInterop --moduleResolution node16 /tmp/types-generator/tests/test.ts
rm -r /tmp/types-generator
elif [ "$1" = "client" ]; then
npm run build
npm run build:tests
feedsFiles=$(findTestFiles ./dist/server/tests/feeds)
helperFiles=$(findTestFiles ./dist/server/tests/helpers)
libFiles=$(findTestFiles ./dist/server/tests/lib)
miscFiles="./dist/server/tests/client.js ./dist/server/tests/misc-endpoints.js"
feedsFiles=$(findTestFiles ./packages/tests/dist/feeds)
miscFiles="./packages/tests/dist/client.js ./packages/tests/dist/misc-endpoints.js"
# Not in their own task, they need an index.html
pluginFiles="./dist/server/tests/plugins/html-injection.js ./dist/server/tests/api/server/plugins.js"
pluginFiles="./packages/tests/dist/plugins/html-injection.js ./packages/tests/dist/api/server/plugins.js"
MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $feedsFiles $helperFiles $miscFiles $pluginFiles $libFiles
MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $feedsFiles $miscFiles $pluginFiles
# Use TS tests directly because we import server files
helperFiles=$(findTestFiles ./packages/tests/src/server-helpers)
libFiles=$(findTestFiles ./packages/tests/src/server-lib)
npm run mocha -- --timeout 30000 -c --exit --bail $libFiles $helperFiles
elif [ "$1" = "cli-plugin" ]; then
# Simulate HTML
mkdir -p "./client/dist/en-US/"
cp "./client/src/index.html" "./client/dist/en-US/index.html"
npm run build:server
npm run setup:cli
npm run build:tests
npm run build:peertube-cli
pluginsFiles=$(findTestFiles ./dist/server/tests/plugins html-injection.js)
cliFiles=$(findTestFiles ./dist/server/tests/cli)
# html-injection test needs an HTML file
pluginsFiles=$(findTestFiles ./packages/tests/dist/plugins html-injection.js)
cliFiles=$(findTestFiles ./packages/tests/dist/cli)
MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $pluginsFiles
runTest "$1" 1 $cliFiles
MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $pluginsFiles
runJSTest "$1" 1 $cliFiles
elif [ "$1" = "api-1" ]; then
npm run build:server
npm run build:tests
checkParamFiles=$(findTestFiles ./dist/server/tests/api/check-params)
notificationsFiles=$(findTestFiles ./dist/server/tests/api/notifications)
searchFiles=$(findTestFiles ./dist/server/tests/api/search)
checkParamFiles=$(findTestFiles ./packages/tests/dist/api/check-params)
notificationsFiles=$(findTestFiles ./packages/tests/dist/api/notifications)
searchFiles=$(findTestFiles ./packages/tests/dist/api/search)
MOCHA_PARALLEL=true runTest "$1" $((3*$speedFactor)) $notificationsFiles $searchFiles $checkParamFiles
MOCHA_PARALLEL=true runJSTest "$1" $((3*$speedFactor)) $notificationsFiles $searchFiles $checkParamFiles
elif [ "$1" = "api-2" ]; then
npm run build:server
npm run build:tests
liveFiles=$(findTestFiles ./dist/server/tests/api/live)
serverFiles=$(findTestFiles ./dist/server/tests/api/server plugins.js)
usersFiles=$(findTestFiles ./dist/server/tests/api/users)
liveFiles=$(findTestFiles ./packages/tests/dist/api/live)
# plugins test needs an HTML file
serverFiles=$(findTestFiles ./packages/tests/dist/api/server plugins.js)
usersFiles=$(findTestFiles ./packages/tests/dist/api/users)
MOCHA_PARALLEL=true runTest "$1" $((3*$speedFactor)) $liveFiles $serverFiles $usersFiles
MOCHA_PARALLEL=true runJSTest "$1" $((3*$speedFactor)) $liveFiles $serverFiles $usersFiles
elif [ "$1" = "api-3" ]; then
npm run build:server
npm run build:tests
videosFiles=$(findTestFiles ./dist/server/tests/api/videos)
viewsFiles=$(findTestFiles ./dist/server/tests/api/views)
videosFiles=$(findTestFiles ./packages/tests/dist/api/videos)
viewsFiles=$(findTestFiles ./packages/tests/dist/api/views)
MOCHA_PARALLEL=true runTest "$1" $((3*$speedFactor)) $viewsFiles $videosFiles
MOCHA_PARALLEL=true runJSTest "$1" $((3*$speedFactor)) $viewsFiles $videosFiles
elif [ "$1" = "api-4" ]; then
npm run build:server
npm run build:tests
moderationFiles=$(findTestFiles ./dist/server/tests/api/moderation)
redundancyFiles=$(findTestFiles ./dist/server/tests/api/redundancy)
objectStorageFiles=$(findTestFiles ./dist/server/tests/api/object-storage)
activitypubFiles=$(findTestFiles ./dist/server/tests/api/activitypub)
moderationFiles=$(findTestFiles ./packages/tests/dist/api/moderation)
redundancyFiles=$(findTestFiles ./packages/tests/dist/api/redundancy)
objectStorageFiles=$(findTestFiles ./packages/tests/dist/api/object-storage)
activitypubFiles=$(findTestFiles ./packages/tests/dist/api/activitypub)
MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $moderationFiles $redundancyFiles $activitypubFiles $objectStorageFiles
MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $moderationFiles $redundancyFiles $activitypubFiles $objectStorageFiles
elif [ "$1" = "api-5" ]; then
npm run build:server
npm run build:tests
transcodingFiles=$(findTestFiles ./dist/server/tests/api/transcoding)
runnersFiles=$(findTestFiles ./dist/server/tests/api/runners)
transcodingFiles=$(findTestFiles ./packages/tests/dist/api/transcoding)
runnersFiles=$(findTestFiles ./packages/tests/dist/api/runners)
MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $transcodingFiles $runnersFiles
MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $transcodingFiles $runnersFiles
elif [ "$1" = "external-plugins" ]; then
npm run build:server
npm run build:tests
npm run build:peertube-runner
externalPluginsFiles=$(findTestFiles ./dist/server/tests/external-plugins)
peertubeRunnerFiles=$(findTestFiles ./dist/server/tests/peertube-runner)
externalPluginsFiles=$(findTestFiles ./packages/tests/dist/external-plugins)
peertubeRunnerFiles=$(findTestFiles ./packages/tests/dist/peertube-runner)
runTest "$1" 1 $externalPluginsFiles
MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $peertubeRunnerFiles
runJSTest "$1" 1 $externalPluginsFiles
MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $peertubeRunnerFiles
elif [ "$1" = "lint" ]; then
npm run eslint -- --ext .ts "./server/**/*.ts" "shared/**/*.ts" "scripts/**/*.ts"
npm run eslint -- --ext .ts "server/**/*.ts" "scripts/**/*.ts" "packages/**/*.ts" "apps/**/*.ts"
npm run swagger-cli -- validate support/doc/api/openapi.yaml
( cd client

View File

@@ -1,6 +1,6 @@
import { readdir, stat } from 'fs-extra'
import { readdir, stat } from 'fs/promises'
import { join } from 'path'
import { root } from '@shared/core-utils'
import { root } from '@peertube/peertube-node-utils'
async function run () {
const result = {

View File

@@ -1,85 +0,0 @@
import { program } from 'commander'
import { toCompleteUUID } from '@server/helpers/custom-validators/misc'
import { initDatabaseModels } from '@server/initializers/database'
import { JobQueue } from '@server/lib/job-queue'
import { VideoModel } from '@server/models/video/video'
import { StoryboardModel } from '@server/models/video/storyboard'
// CLI entry: enqueue storyboard generation jobs for local videos.
program
.description('Generate videos storyboard')
.option('-v, --video [videoUUID]', 'Generate the storyboard of a specific video')
.option('-a, --all-videos', 'Generate missing storyboards of local videos')
.parse(process.argv)
const options = program.opts()
// One of the two targeting flags is mandatory
if (!options['video'] && !options['allVideos']) {
console.error('You need to choose videos for storyboard generation.')
process.exit(-1)
}
// Run and surface any failure through the process exit code
run()
.then(() => process.exit(0))
.catch(err => {
console.error(err)
process.exit(-1)
})
// Resolve the target video ids then create one 'generate-video-storyboard'
// job per id (lives are skipped: they have no storyboard).
async function run () {
await initDatabaseModels(true)
JobQueue.Instance.init()
let ids: number[] = []
if (options['video']) {
// Single-video mode: the video must exist, be local and not be a live
const video = await VideoModel.load(toCompleteUUID(options['video']))
if (!video) {
console.error('Unknown video ' + options['video'])
process.exit(-1)
}
if (video.remote === true) {
console.error('Cannot process a remote video')
process.exit(-1)
}
if (video.isLive) {
console.error('Cannot process live video')
process.exit(-1)
}
ids.push(video.id)
} else {
ids = await listLocalMissingStoryboards()
}
for (const id of ids) {
const videoFull = await VideoModel.load(id)
if (videoFull.isLive) continue
await JobQueue.Instance.createJob({
type: 'generate-video-storyboard',
payload: {
videoUUID: videoFull.uuid,
// Also announce the updated video to followers
federate: true
}
})
console.log(`Created generate-storyboard job for ${videoFull.name}.`)
}
}
async function listLocalMissingStoryboards () {
const ids = await VideoModel.listLocalIds()
const results: number[] = []
for (const id of ids) {
const storyboard = await StoryboardModel.loadByVideo(id)
if (!storyboard) results.push(id)
}
return results
}

View File

@@ -1,50 +0,0 @@
import { program } from 'commander'
import { resolve } from 'path'
import { isUUIDValid, toCompleteUUID } from '@server/helpers/custom-validators/misc'
import { initDatabaseModels } from '../server/initializers/database'
import { JobQueue } from '../server/lib/job-queue'
import { VideoModel } from '../server/models/video/video'
// CLI entry: create a 'video-file-import' job to replace an existing file
// or add a new resolution to an already uploaded video.
program
.option('-v, --video [videoUUID]', 'Video UUID')
.option('-i, --import [videoFile]', 'Video file')
.description('Import a video file to replace an already uploaded file or to add a new resolution')
.parse(process.argv)
const options = program.opts()
// Both the target video and the file to import are required
if (options.video === undefined || options.import === undefined) {
console.error('All parameters are mandatory.')
process.exit(-1)
}
run()
.then(() => process.exit(0))
.catch(err => {
console.error(err)
process.exit(-1)
})
// Validate the target video (local, valid UUID) then enqueue the import job.
async function run () {
await initDatabaseModels(true)
// Accept short UUIDs too: normalize before validating
const uuid = toCompleteUUID(options.video)
if (isUUIDValid(uuid) === false) {
console.error('%s is not a valid video UUID.', options.video)
return
}
const video = await VideoModel.load(uuid)
if (!video) throw new Error('Video not found.')
if (video.isOwned() === false) throw new Error('Cannot import files of a non owned video.')
const dataInput = {
videoUUID: video.uuid,
// Resolve relative paths against the current working directory
filePath: resolve(options.import)
}
JobQueue.Instance.init()
await JobQueue.Instance.createJob({ type: 'video-file-import', payload: dataInput })
console.log('Import job for video %s created.', video.uuid)
}

View File

@@ -1,99 +0,0 @@
import { program } from 'commander'
import { toCompleteUUID } from '@server/helpers/custom-validators/misc'
import { CONFIG } from '@server/initializers/config'
import { initDatabaseModels } from '@server/initializers/database'
import { JobQueue } from '@server/lib/job-queue'
import { moveToExternalStorageState } from '@server/lib/video-state'
import { VideoModel } from '@server/models/video/video'
import { VideoState, VideoStorage } from '@shared/models'
// CLI entry: move video files of local videos to another storage backend.
program
.description('Move videos to another storage.')
.option('-o, --to-object-storage', 'Move videos in object storage')
.option('-v, --video [videoUUID]', 'Move a specific video')
.option('-a, --all-videos', 'Migrate all videos')
.parse(process.argv)
const options = program.opts()
// A destination is mandatory (only object storage is supported)
if (!options['toObjectStorage']) {
console.error('You need to choose where to send video files.')
process.exit(-1)
}
// Either a specific video or all local videos must be targeted
if (!options['video'] && !options['allVideos']) {
console.error('You need to choose which videos to move.')
process.exit(-1)
}
if (options['toObjectStorage'] && !CONFIG.OBJECT_STORAGE.ENABLED) {
console.error('Object storage is not enabled on this instance.')
process.exit(-1)
}
run()
.then(() => process.exit(0))
.catch(err => {
console.error(err)
process.exit(-1)
})
// Resolve target ids then request a state change to TO_MOVE_TO_EXTERNAL_STORAGE
// for every video that still has at least one file on the local filesystem.
async function run () {
await initDatabaseModels(true)
JobQueue.Instance.init()
let ids: number[] = []
if (options['video']) {
// Single-video mode: must exist, be local, not live, and not already moving
const video = await VideoModel.load(toCompleteUUID(options['video']))
if (!video) {
console.error('Unknown video ' + options['video'])
process.exit(-1)
}
if (video.remote === true) {
console.error('Cannot process a remote video')
process.exit(-1)
}
if (video.isLive) {
console.error('Cannot process live video')
process.exit(-1)
}
if (video.state === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) {
console.error('This video is already being moved to external storage')
process.exit(-1)
}
ids.push(video.id)
} else {
ids = await VideoModel.listLocalIds()
}
for (const id of ids) {
const videoFull = await VideoModel.loadFull(id)
if (videoFull.isLive) continue
const files = videoFull.VideoFiles || []
const hls = videoFull.getHLSPlaylist()
// Only process videos that still have files stored on the filesystem
if (files.some(f => f.storage === VideoStorage.FILE_SYSTEM) || hls?.storage === VideoStorage.FILE_SYSTEM) {
console.log('Processing video %s.', videoFull.name)
const success = await moveToExternalStorageState({ video: videoFull, isNewVideo: false, transaction: undefined })
if (!success) {
console.error(
'Cannot create move job for %s: job creation may have failed or there may be pending transcoding jobs for this video',
videoFull.name
)
}
}
// NOTE(review): this success message also prints when the video was skipped
// or when job creation failed above — confirm this is intended
console.log(`Created move-to-object-storage job for ${videoFull.name}.`)
}
}

View File

@@ -1,16 +0,0 @@
#!/bin/bash
set -eu
rm -rf ./dist/server/tools/
(
cd ./server/tools
yarn install --pure-lockfile
)
mkdir -p "./dist/server/tools"
cp -r "./server/tools/node_modules" "./dist/server/tools"
cd ./server/tools
../../node_modules/.bin/tsc-watch --build --verbose --onSuccess 'sh -c "cd ../../ && npm run resolve-tspaths:server"'

11
scripts/dev/peertube-cli.sh Executable file
View File

@@ -0,0 +1,11 @@
#!/bin/bash
# Dev mode for the peertube-cli app: type checking and rebuild in watch mode.
set -eu
rm -rf ./apps/peertube-cli/dist
cd ./apps/peertube-cli
# concurrently -k kills the sibling process when either one exits
../../node_modules/.bin/concurrently -k \
"../../node_modules/.bin/tsc -w --noEmit" \
"node ./scripts/watch.js"

View File

@@ -2,10 +2,10 @@
set -eu
rm -rf ./packages/peertube-runner/dist
rm -rf ./apps/peertube-runner/dist
cd ./packages/peertube-runner
cd ./apps/peertube-runner
../../node_modules/.bin/concurrently -k \
"../../node_modules/.bin/tsc -w --noEmit" \
"./node_modules/.bin/esbuild ./peertube-runner.ts --bundle --sourcemap --platform=node --external:"./lib-cov/fluent-ffmpeg" --external:pg-hstore --watch --outfile=dist/peertube-runner.js"
"node ./scripts/watch.js"

View File

@@ -16,10 +16,10 @@ cp -r "./client/src/locale" "./client/dist/locale"
mkdir -p "./dist/server/lib"
npm run tsc -- -b -v --incremental
npm run tsc -- -b -v --incremental server/tsconfig.json
npm run resolve-tspaths:server
cp -r ./server/static ./server/assets ./dist/server
cp -r "./server/lib/emails" "./dist/server/lib"
cp -r ./server/server/static ./server/server/assets ./dist/server
cp -r "./server/server/lib/emails" "./dist/server/lib"
./node_modules/.bin/tsc-watch --build --preserveWatchOutput --verbose --onSuccess 'sh -c "npm run resolve-tspaths:server && NODE_ENV=dev node dist/server"'
./node_modules/.bin/tsc-watch --build --preserveWatchOutput --verbose --onSuccess 'sh -c "npm run resolve-tspaths:server && NODE_ENV=dev node dist/server"' server/tsconfig.json

View File

@@ -1,4 +1,4 @@
import { CLICommand } from '@shared/server-commands'
import { CLICommand } from '@peertube/peertube-server-commands'
run()
.then(() => process.exit(0))

View File

@@ -1,6 +1,7 @@
import { writeJSON } from 'fs-extra'
import { readJsonSync, writeJSON } from 'fs-extra/esm'
import { join } from 'path'
import { root, USER_ROLE_LABELS } from '@shared/core-utils'
import { I18N_LOCALES, USER_ROLE_LABELS } from '@peertube/peertube-core-utils'
import { root } from '@peertube/peertube-node-utils'
import {
ABUSE_STATES,
buildLanguages,
@@ -14,10 +15,9 @@ import {
VIDEO_PLAYLIST_TYPES,
VIDEO_PRIVACIES,
VIDEO_STATES
} from '../../server/initializers/constants'
import { I18N_LOCALES } from '../../shared/core-utils/i18n'
} from '../../server/initializers/constants.js'
const videojs = require(join(root(), 'client', 'src', 'locale', 'videojs.en-US.json'))
const videojs = readJsonSync(join(root(), 'client', 'src', 'locale', 'videojs.en-US.json'))
const playerKeys = {
'Quality': 'Quality',
'Auto': 'Auto',
@@ -131,13 +131,13 @@ async function writeAll () {
for (const key of Object.keys(I18N_LOCALES)) {
const playerJsonPath = join(localePath, `player.${key}.json`)
const translatedPlayer = require(playerJsonPath)
const translatedPlayer = readJsonSync(playerJsonPath)
const newTranslatedPlayer = Object.assign({}, playerKeys, translatedPlayer)
await writeJSON(playerJsonPath, newTranslatedPlayer, { spaces: 4 })
const serverJsonPath = join(localePath, `server.${key}.json`)
const translatedServer = require(serverJsonPath)
const translatedServer = readJsonSync(serverJsonPath)
const newTranslatedServer = Object.assign({}, serverKeys, translatedServer)
await writeJSON(serverJsonPath, newTranslatedServer, { spaces: 4 })

View File

@@ -1,104 +0,0 @@
import Bluebird from 'bluebird'
import { move, readFile, writeFile } from 'fs-extra'
import { join } from 'path'
import { federateVideoIfNeeded } from '@server/lib/activitypub/videos'
import { JobQueue } from '@server/lib/job-queue'
import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename, getHlsResolutionPlaylistFilename } from '@server/lib/paths'
import { VideoPathManager } from '@server/lib/video-path-manager'
import { VideoModel } from '@server/models/video/video'
import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist'
import { initDatabaseModels } from '../../server/initializers/database'
run()
.then(() => process.exit(0))
.catch(err => {
console.error(err)
process.exit(-1)
})
// Rename the legacy HLS files (master.m3u8, <resolution>.m3u8,
// segments-sha256.json) of every local video to the new generated names.
async function run () {
console.log('Migrate old HLS paths to new format.')
await initDatabaseModels(true)
JobQueue.Instance.init()
const ids = await VideoModel.listLocalIds()
// Process 5 videos at a time; a failure on one video does not stop the migration
await Bluebird.map(ids, async id => {
try {
await processVideo(id)
} catch (err) {
// NOTE(review): '%s' gets no argument here, only the { err } object is
// passed — the message prints oddly; confirm intended
console.error('Cannot process video %s.', { err })
}
}, { concurrency: 5 })
console.log('Migration finished!')
}
// Migrate one video's HLS files from the legacy fixed names to the new
// generated names, patching the master playlist content accordingly, then
// persist the playlist row and re-federate the video.
async function processVideo (videoId: number) {
const video = await VideoModel.loadWithFiles(videoId)
const hls = video.getHLSPlaylist()
// Skip lives, videos without HLS files, and already-migrated videos
// (a non legacy playlist filename means nothing to do)
if (video.isLive || !hls || hls.playlistFilename !== 'master.m3u8' || hls.VideoFiles.length === 0) {
return
}
console.log(`Renaming HLS playlist files of video ${video.name}.`)
const playlist = await VideoStreamingPlaylistModel.loadHLSPlaylistByVideo(video.id)
const hlsDirPath = VideoPathManager.Instance.getFSHLSOutputPath(video)
const masterPlaylistPath = join(hlsDirPath, playlist.playlistFilename)
let masterPlaylistContent = await readFile(masterPlaylistPath, 'utf8')
// Rename each resolution playlist and patch its reference in the master playlist
for (const videoFile of hls.VideoFiles) {
const srcName = `${videoFile.resolution}.m3u8`
const dstName = getHlsResolutionPlaylistFilename(videoFile.filename)
const src = join(hlsDirPath, srcName)
const dst = join(hlsDirPath, dstName)
try {
await move(src, dst)
// 'm' flag: the old name must match a whole line of the playlist
masterPlaylistContent = masterPlaylistContent.replace(new RegExp('^' + srcName + '$', 'm'), dstName)
} catch (err) {
console.error('Cannot move video file %s to %s.', src, dst, err)
}
}
await writeFile(masterPlaylistPath, masterPlaylistContent)
// Rename the sha256 segments file if it still uses the legacy name
if (playlist.segmentsSha256Filename === 'segments-sha256.json') {
try {
const newName = generateHlsSha256SegmentsFilename(video.isLive)
const dst = join(hlsDirPath, newName)
await move(join(hlsDirPath, playlist.segmentsSha256Filename), dst)
playlist.segmentsSha256Filename = newName
} catch (err) {
console.error(`Cannot rename ${video.name} segments-sha256.json file to a new name`, err)
}
}
// Rename the master playlist last, after its content has been rewritten
if (playlist.playlistFilename === 'master.m3u8') {
try {
const newName = generateHLSMasterPlaylistFilename(video.isLive)
const dst = join(hlsDirPath, newName)
await move(join(hlsDirPath, playlist.playlistFilename), dst)
playlist.playlistFilename = newName
} catch (err) {
console.error(`Cannot rename ${video.name} master.m3u8 file to a new name`, err)
}
}
// Everything worked, we can save the playlist now
await playlist.save()
const allVideo = await VideoModel.loadFull(video.id)
await federateVideoIfNeeded(allVideo, false)
console.log(`Successfully moved HLS files of ${video.name}.`)
}

View File

@@ -1,124 +0,0 @@
import { minBy } from 'lodash'
import { join } from 'path'
import { getImageSize, processImage } from '@server/helpers/image-utils'
import { CONFIG } from '@server/initializers/config'
import { ACTOR_IMAGES_SIZE } from '@server/initializers/constants'
import { updateActorImages } from '@server/lib/activitypub/actors'
import { sendUpdateActor } from '@server/lib/activitypub/send'
import { getBiggestActorImage } from '@server/lib/actor-image'
import { JobQueue } from '@server/lib/job-queue'
import { AccountModel } from '@server/models/account/account'
import { ActorModel } from '@server/models/actor/actor'
import { VideoChannelModel } from '@server/models/video/video-channel'
import { MAccountDefault, MActorDefault, MChannelDefault } from '@server/types/models'
import { getLowercaseExtension } from '@shared/core-utils'
import { buildUUID } from '@shared/extra-utils'
import { ActorImageType } from '@shared/models'
import { initDatabaseModels } from '../../server/initializers/database'
run()
.then(() => process.exit(0))
.catch(err => {
console.error(err)
process.exit(-1)
})
// Backfill missing avatar dimensions and generate the smaller avatar variant
// for every local account and channel.
async function run () {
console.log('Generate avatar miniatures from existing avatars.')
await initDatabaseModels(true)
JobQueue.Instance.init()
// Local accounts only (serverId is null), loaded with their channels
const accounts: AccountModel[] = await AccountModel.findAll({
include: [
{
model: ActorModel,
required: true,
where: {
serverId: null
}
},
{
model: VideoChannelModel,
include: [
{
model: AccountModel
}
]
}
]
})
// Each failure is logged but does not abort the whole generation
for (const account of accounts) {
try {
await fillAvatarSizeIfNeeded(account)
await generateSmallerAvatarIfNeeded(account)
} catch (err) {
console.error(`Cannot process account avatar ${account.name}`, err)
}
for (const videoChannel of account.VideoChannels) {
try {
await fillAvatarSizeIfNeeded(videoChannel)
await generateSmallerAvatarIfNeeded(videoChannel)
} catch (err) {
console.error(`Cannot process channel avatar ${videoChannel.name}`, err)
}
}
}
console.log('Generation finished!')
}
// Persist width/height for every avatar of the account or channel that is
// missing them, measuring the image file on disk.
async function fillAvatarSizeIfNeeded (accountOrChannel: MAccountDefault | MChannelDefault) {
for (const avatar of accountOrChannel.Actor.Avatars) {
if (avatar.width && avatar.height) continue
console.log('Filling size of avatars of %s.', accountOrChannel.name)
const dimensions = await getImageSize(join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, avatar.filename))
avatar.width = dimensions.width
avatar.height = dimensions.height
await avatar.save()
}
}
// If the actor still has a single avatar (pre-miniature state), generate the
// smaller variant then send an actor update to followers.
async function generateSmallerAvatarIfNeeded (accountOrChannel: MAccountDefault | MChannelDefault) {
const avatars = accountOrChannel.Actor.Avatars
if (avatars.length !== 1) {
return
}
console.log(`Processing ${accountOrChannel.name}.`)
await generateSmallerAvatar(accountOrChannel.Actor)
// NOTE(review): Server is nulled before federation — presumably so the actor
// serializes as a local actor; confirm against sendUpdateActor's expectations
accountOrChannel.Actor = Object.assign(accountOrChannel.Actor, { Server: null })
return sendUpdateActor(accountOrChannel, undefined)
}
// Create the smallest configured avatar size from the actor's biggest avatar
// and register the new image on the actor; the source file is kept on disk.
async function generateSmallerAvatar (actor: MActorDefault) {
const bigAvatar = getBiggestActorImage(actor.Avatars)
// Smallest configured avatar size (by width)
const imageSize = minBy(ACTOR_IMAGES_SIZE[ActorImageType.AVATAR], 'width')
const sourceFilename = bigAvatar.filename
// Keep the source extension but use a fresh UUID-based name
const newImageName = buildUUID() + getLowercaseExtension(sourceFilename)
const source = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, sourceFilename)
const destination = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, newImageName)
await processImage({ path: source, destination, newSize: imageSize, keepOriginal: true })
const actorImageInfo = {
name: newImageName,
fileUrl: null,
height: imageSize.height,
width: imageSize.width,
onDisk: true
}
await updateActorImages(actor, ActorImageType.AVATAR, [ actorImageInfo ], undefined)
}

View File

@@ -1,71 +0,0 @@
import { ensureDir } from 'fs-extra'
import { Op } from 'sequelize'
import { updateTorrentMetadata } from '@server/helpers/webtorrent'
import { DIRECTORIES } from '@server/initializers/constants'
import { moveFilesIfPrivacyChanged } from '@server/lib/video-privacy'
import { VideoModel } from '@server/models/video/video'
import { MVideoFullLight } from '@server/types/models'
import { VideoPrivacy } from '@shared/models'
import { initDatabaseModels } from '../../server/initializers/database'
run()
.then(() => process.exit(0))
.catch(err => {
console.error(err)
process.exit(-1)
})
// Move files of private/internal videos into the dedicated private directories
// and regenerate their torrent metadata.
async function run () {
console.log('Moving private video files in dedicated folders.')
await ensureDir(DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE)
await ensureDir(DIRECTORIES.VIDEOS.PRIVATE)
await initDatabaseModels(true)
// Only private/internal videos need their files relocated
const videos = await VideoModel.unscoped().findAll({
attributes: [ 'uuid' ],
where: {
privacy: {
[Op.in]: [ VideoPrivacy.PRIVATE, VideoPrivacy.INTERNAL ]
}
}
})
// Per-video failures are logged and do not abort the migration
for (const { uuid } of videos) {
try {
console.log('Moving files of video %s.', uuid)
const video = await VideoModel.loadFull(uuid)
try {
// NOTE(review): PUBLIC appears to be passed as the *previous* privacy so
// the helper detects a change and moves the files — confirm its contract
await moveFilesIfPrivacyChanged(video, VideoPrivacy.PUBLIC)
} catch (err) {
console.error('Cannot move files of video %s.', uuid, err)
}
try {
await updateTorrents(video)
} catch (err) {
console.error('Cannot regenerate torrents of video %s.', uuid, err)
}
} catch (err) {
console.error('Cannot process video %s.', uuid, err)
}
}
}
// Regenerate torrent metadata for every web file and HLS file of the video.
async function updateTorrents (video: MVideoFullLight) {
for (const webFile of video.VideoFiles) {
await updateTorrentMetadata(video, webFile)
await webFile.save()
}
const hlsPlaylist = video.getHLSPlaylist()
if (!hlsPlaylist) return
for (const hlsFile of hlsPlaylist.VideoFiles || []) {
await updateTorrentMetadata(hlsPlaylist, hlsFile)
await hlsFile.save()
}
}

View File

@@ -20,6 +20,13 @@ tar_name="peertube-nightly-$today.tar.xz"
npm run build -- --source-map
# Clean up declaration files
find dist/ packages/core-utils/dist/ \
packages/ffmpeg/dist/ \
packages/node-utils/dist/ \
packages/models/dist/ \
\( -name '*.d.ts' -o -name '*.d.ts.map' \) -type f -delete
nightly_version="nightly-$today"
sed -i 's/"version": "\([^"]\+\)"/"version": "\1-'"$nightly_version"'"/' ./package.json
@@ -28,6 +35,10 @@ sed -i 's/"version": "\([^"]\+\)"/"version": "\1-'"$nightly_version"'"/' ./packa
# local variables
directories_to_archive=("$directory_name/CREDITS.md" "$directory_name/FAQ.md" \
"$directory_name/LICENSE" "$directory_name/README.md" \
"$directory_name/packages/core-utils/dist/" "$directory_name/packages/core-utils/package.json" \
"$directory_name/packages/ffmpeg/dist/" "$directory_name/packages/ffmpeg/package.json" \
"$directory_name/packages/node-utils/dist/" "$directory_name/packages/node-utils/package.json" \
"$directory_name/packages/models/dist/" "$directory_name/packages/models/package.json" \
"$directory_name/client/dist/" "$directory_name/client/yarn.lock" \
"$directory_name/client/package.json" "$directory_name/config" \
"$directory_name/dist" "$directory_name/package.json" \

View File

@@ -1,160 +0,0 @@
import { program } from 'commander'
import { createReadStream, readdir } from 'fs-extra'
import { join } from 'path'
import { stdin } from 'process'
import { createInterface } from 'readline'
import { format as sqlFormat } from 'sql-formatter'
import { inspect } from 'util'
import * as winston from 'winston'
import { labelFormatter, mtimeSortFilesDesc } from '../server/helpers/logger'
import { CONFIG } from '../server/initializers/config'
// CLI options: log level filter, explicit files, include/exclude tag filters.
program
.option('-l, --level [level]', 'Level log (debug/info/warn/error)')
.option('-f, --files [file...]', 'Files to parse. If not provided, the script will parse the latest log file from config)')
.option('-t, --tags [tags...]', 'Display only lines with these tags')
.option('-nt, --not-tags [tags...]', 'Don\'t display lines containing these tags')
.parse(process.argv)
const options = program.opts()
// Winston metadata keys that must not be re-printed as additional info
const excludedKeys = {
level: true,
message: true,
splat: true,
timestamp: true,
tags: true,
label: true,
sql: true
}
// JSON.stringify replacer: drop winston internal keys from the printed metadata
function keysExcluder (key, value) {
return excludedKeys[key] === true ? undefined : value
}
// Re-render a parsed log entry: "[label] time level: message" plus pretty
// printed leftover metadata and (optionally formatted) SQL
const loggerFormat = winston.format.printf((info) => {
let additionalInfos = JSON.stringify(info, keysExcluder, 2)
if (additionalInfos === '{}') additionalInfos = ''
else additionalInfos = ' ' + additionalInfos
if (info.sql) {
if (CONFIG.LOG.PRETTIFY_SQL) {
additionalInfos += '\n' + sqlFormat(info.sql, {
language: 'sql',
tabWidth: 2
})
} else {
additionalInfos += ' - ' + info.sql
}
}
return `[${info.label}] ${toTimeFormat(info.timestamp)} ${info.level}: ${info.message}${additionalInfos}`
})
// Console logger used to re-emit the parsed JSON log lines in a colored,
// human readable form. Level is configurable via the -l option.
const logger = winston.createLogger({
  transports: [
    new winston.transports.Console({
      level: options.level || 'debug',
      // Print everything on stdout, even warnings/errors
      stderrLevels: [],
      format: winston.format.combine(
        winston.format.splat(),
        labelFormatter(),
        winston.format.colorize(),
        loggerFormat
      )
    })
  ],
  exitOnError: true
})

// Dispatch table: log level name (from the parsed line) -> logger method
const logLevels = {
  error: logger.error.bind(logger),
  warn: logger.warn.bind(logger),
  info: logger.info.bind(logger),
  debug: logger.debug.bind(logger)
}

run()
  .then(() => process.exit(0))
  .catch(err => console.error(err))
// Entry point: pretty-print every requested log file in order
async function run () {
  for (const file of await getFiles()) {
    // Audit logs have their own format, skip them
    if (file !== 'peertube-audit.log') await readFile(file)
  }
}
// Stream one log file ('-' means stdin) line by line, filter entries by
// tags/not-tags options, and re-emit each JSON entry through the logger.
// Resolves when the underlying stream ends.
function readFile (file: string) {
  console.log('Opening %s.', file)

  const stream = file === '-' ? stdin : createReadStream(file)

  const rl = createInterface({
    input: stream
  })

  return new Promise<void>(res => {
    rl.on('line', line => {
      try {
        const log = JSON.parse(line)
        // -t option: keep only lines carrying at least one requested tag
        if (options.tags && !containsTags(log.tags, options.tags)) {
          return
        }
        // -nt option: drop lines carrying any excluded tag
        if (options.notTags && containsTags(log.tags, options.notTags)) {
          return
        }
        // Don't know why but loggerFormat does not remove splat key
        Object.assign(log, { splat: undefined })

        logLevels[log.level](log)
      } catch (err) {
        console.error('Cannot parse line.', inspect(line))
        throw err
      }
    })

    stream.once('end', () => res())
  })
}
// Thanks: https://stackoverflow.com/a/37014317
// Return the most recently modified file of the list ('' when empty)
async function getNewestFile (files: string[], basePath: string) {
  const sortedByMtimeDesc = await mtimeSortFilesDesc(files, basePath)

  if (sortedByMtimeDesc.length === 0) return ''

  return sortedByMtimeDesc[0].file
}
// Resolve the log files to parse: the -f option when provided,
// otherwise the newest file of the configured log directory
async function getFiles () {
  if (options.files) return options.files

  const logFiles = await readdir(CONFIG.STORAGE.LOG_DIR)
  const filename = await getNewestFile(logFiles, CONFIG.STORAGE.LOG_DIR)

  return [ join(CONFIG.STORAGE.LOG_DIR, filename) ]
}
// Convert a log timestamp string into a localized date with millisecond
// precision, or 'Unknown date' when the string is not parseable
function toTimeFormat (time: string) {
  const timestamp = Date.parse(time)

  // Number.isNaN avoids the implicit coercion of the global isNaN
  if (Number.isNaN(timestamp)) return 'Unknown date'

  const d = new Date(timestamp)
  return d.toLocaleString() + `.${d.getMilliseconds()}`
}
// Does the log entry carry at least one of the requested tags?
function containsTags (loggerTags: string[], optionsTags: string[]) {
  // Entries without tags never match
  if (!loggerTags) return false

  return loggerTags.some(tag => optionsTags.includes(tag))
}

View File

@@ -1,41 +0,0 @@
import { program } from 'commander'
import { isAbsolute } from 'path'
import { initDatabaseModels } from '../../server/initializers/database'
import { PluginManager } from '../../server/lib/plugins/plugin-manager'
// CLI to install a plugin/theme, either from npm (-n) or from a local
// absolute path (-p). Exactly one of the two sources is required.
program
  .option('-n, --npm-name [npmName]', 'Plugin to install')
  .option('-v, --plugin-version [pluginVersion]', 'Plugin version to install')
  .option('-p, --plugin-path [pluginPath]', 'Path of the plugin you want to install')
  .parse(process.argv)

const options = program.opts()

if (!options.npmName && !options.pluginPath) {
  console.error('You need to specify a plugin name with the desired version, or a plugin path.')
  process.exit(-1)
}

// Relative paths would resolve against the server working directory, refuse them
if (options.pluginPath && !isAbsolute(options.pluginPath)) {
  console.error('Plugin path should be absolute.')
  process.exit(-1)
}

run()
  .then(() => process.exit(0))
  .catch(err => {
    console.error(err)
    process.exit(-1)
  })
// Install the plugin from npm (npmName) or from the local disk path.
// register: false because the server is not running while this CLI executes.
async function run () {
  await initDatabaseModels(true)

  await PluginManager.Instance.install({
    toInstall: options.npmName || options.pluginPath,
    version: options.pluginVersion,
    fromDisk: !!options.pluginPath,
    register: false
  })
}

View File

@@ -1,29 +0,0 @@
import { program } from 'commander'
import { initDatabaseModels } from '../../server/initializers/database'
import { PluginManager } from '../../server/lib/plugins/plugin-manager'
// CLI to uninstall a plugin/theme previously installed from npm (-n)
program
  .option('-n, --npm-name [npmName]', 'Package name to install')
  .parse(process.argv)

const options = program.opts()

if (!options.npmName) {
  console.error('You need to specify the plugin name.')
  process.exit(-1)
}

run()
  .then(() => process.exit(0))
  .catch(err => {
    console.error(err)
    process.exit(-1)
  })
// Remove the plugin files and database row.
// unregister: false because the server is not running while this CLI executes.
async function run () {
  await initDatabaseModels(true)

  await PluginManager.Instance.uninstall({ npmName: options.npmName, unregister: false })
}

View File

@@ -1,184 +0,0 @@
import { map } from 'bluebird'
import { readdir, remove, stat } from 'fs-extra'
import { basename, join } from 'path'
import { get, start } from 'prompt'
import { DIRECTORIES } from '@server/initializers/constants'
import { VideoFileModel } from '@server/models/video/video-file'
import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist'
import { uniqify } from '@shared/core-utils'
import { ThumbnailType } from '@shared/models'
import { getUUIDFromFilename } from '../server/helpers/utils'
import { CONFIG } from '../server/initializers/config'
import { initDatabaseModels } from '../server/initializers/database'
import { ActorImageModel } from '../server/models/actor/actor-image'
import { VideoRedundancyModel } from '../server/models/redundancy/video-redundancy'
import { ThumbnailModel } from '../server/models/video/thumbnail'
import { VideoModel } from '../server/models/video/video'
run()
  .then(() => process.exit(0))
  .catch(err => {
    console.error(err)
    process.exit(-1)
  })

// Scan every storage directory for files unknown to the database, then
// delete them after an explicit confirmation from the operator.
// PeerTube must be stopped while this script runs.
async function run () {
  const dirs = Object.values(CONFIG.STORAGE)

  // Two storage keys sharing a directory would make ownership checks ambiguous
  if (uniqify(dirs).length !== dirs.length) {
    console.error('Cannot prune storage because you put multiple storage keys in the same directory.')
    process.exit(0)
  }

  await initDatabaseModels(true)

  let toDelete: string[] = []

  console.log('Detecting files to remove, it could take a while...')

  // Each directory is checked with its own existence predicate
  toDelete = toDelete.concat(
    await pruneDirectory(DIRECTORIES.VIDEOS.PUBLIC, doesWebVideoFileExist()),
    await pruneDirectory(DIRECTORIES.VIDEOS.PRIVATE, doesWebVideoFileExist()),

    await pruneDirectory(DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE, doesHLSPlaylistExist()),
    await pruneDirectory(DIRECTORIES.HLS_STREAMING_PLAYLIST.PUBLIC, doesHLSPlaylistExist()),

    await pruneDirectory(CONFIG.STORAGE.TORRENTS_DIR, doesTorrentFileExist()),

    await pruneDirectory(CONFIG.STORAGE.REDUNDANCY_DIR, doesRedundancyExist),

    await pruneDirectory(CONFIG.STORAGE.PREVIEWS_DIR, doesThumbnailExist(true, ThumbnailType.PREVIEW)),
    await pruneDirectory(CONFIG.STORAGE.THUMBNAILS_DIR, doesThumbnailExist(false, ThumbnailType.MINIATURE)),

    await pruneDirectory(CONFIG.STORAGE.ACTOR_IMAGES_DIR, doesActorImageExist)
  )

  // Everything in the tmp directory is disposable
  const tmpFiles = await readdir(CONFIG.STORAGE.TMP_DIR)
  toDelete = toDelete.concat(tmpFiles.map(t => join(CONFIG.STORAGE.TMP_DIR, t)))

  if (toDelete.length === 0) {
    console.log('No files to delete.')
    return
  }

  console.log('Will delete %d files:\n\n%s\n\n', toDelete.length, toDelete.join('\n'))

  const res = await askConfirmation()
  if (res === true) {
    console.log('Processing delete...\n')

    for (const path of toDelete) {
      await remove(path)
    }

    console.log('Done!')
  } else {
    console.log('Exiting without deleting files.')
  }
}
type ExistFun = (file: string) => Promise<boolean> | boolean

// Return the paths of a directory that the predicate does not recognize
// (i.e. candidates for deletion), checking 20 entries concurrently
async function pruneDirectory (directory: string, existFun: ExistFun) {
  const entries = await readdir(directory)
  const unknownPaths: string[] = []

  await map(entries, async entry => {
    const fullPath = join(directory, entry)

    const exists = await existFun(fullPath)
    if (exists !== true) unknownPaths.push(fullPath)
  }, { concurrency: 20 })

  return unknownPaths
}
// Predicate factory: is this path a web video file we own?
function doesWebVideoFileExist () {
  return (filePath: string) => {
    // Never prune the private videos directory itself
    if (filePath === DIRECTORIES.VIDEOS.PRIVATE) return true

    const filename = basename(filePath)
    return VideoFileModel.doesOwnedWebVideoFileExist(filename)
  }
}
// Predicate factory: is this path an HLS playlist directory we own?
function doesHLSPlaylistExist () {
  return (hlsPath: string) => {
    // Never prune the private playlists directory itself
    if (hlsPath === DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE) return true

    const dirname = basename(hlsPath)
    return VideoStreamingPlaylistModel.doesOwnedHLSPlaylistExist(dirname)
  }
}
// Predicate factory: is this path a torrent file we own?
function doesTorrentFileExist () {
  return (filePath: string) => {
    const filename = basename(filePath)

    return VideoFileModel.doesOwnedTorrentFileExist(filename)
  }
}
// Predicate factory: is this path a known thumbnail of the given type?
// With keepOnlyOwned, thumbnails of remote videos are also pruned.
function doesThumbnailExist (keepOnlyOwned: boolean, type: ThumbnailType) {
  return async (filePath: string) => {
    const thumbnail = await ThumbnailModel.loadByFilename(basename(filePath), type)
    if (!thumbnail) return false

    if (!keepOnlyOwned) return true

    const video = await VideoModel.load(thumbnail.videoId)
    return video.isOwned() !== false
  }
}
// Is this path a known actor avatar/banner image?
async function doesActorImageExist (filePath: string) {
  const filename = basename(filePath)

  const image = await ActorImageModel.loadByName(filename)
  return Boolean(image)
}
// Is this path a known redundancy (directory = HLS playlist redundancy,
// file = web video file redundancy)?
async function doesRedundancyExist (filePath: string) {
  const isPlaylist = (await stat(filePath)).isDirectory()

  if (isPlaylist) {
    // Don't delete HLS redundancy directory
    if (filePath === DIRECTORIES.HLS_REDUNDANCY) return true

    // HLS redundancy directories are named after the video UUID
    const uuid = getUUIDFromFilename(filePath)
    const video = await VideoModel.loadWithFiles(uuid)
    if (!video) return false

    const p = video.getHLSPlaylist()
    if (!p) return false

    const redundancy = await VideoRedundancyModel.loadLocalByStreamingPlaylistId(p.id)
    return !!redundancy
  }

  const file = await VideoFileModel.loadByFilename(basename(filePath))
  if (!file) return false

  const redundancy = await VideoRedundancyModel.loadLocalByFileId(file.id)
  return !!redundancy
}
// Interactively ask the operator to confirm the deletion.
// Resolves true only when the answer explicitly contains 'y'.
async function askConfirmation () {
  return new Promise((res, rej) => {
    start()
    const schema = {
      properties: {
        confirm: {
          type: 'string',
          description: 'These following unused files can be deleted, but please check your backups first (bugs happen).' +
            ' Notice PeerTube must have been stopped when you ran this script.' +
            ' Can we delete these files?',
          default: 'n',
          required: true
        }
      }
    }
    get(schema, function (err, result) {
      if (err) return rej(err)

      // Fixed: the previous `result.confirm?.match(/y/) !== null` check
      // evaluated `undefined !== null` -> true when no answer was given,
      // which would have confirmed the deletion
      const confirmed = typeof result?.confirm === 'string' && /y/.test(result.confirm)
      return res(confirmed)
    })
  })
}

View File

@@ -1,64 +0,0 @@
import { map } from 'bluebird'
import { program } from 'commander'
import { pathExists, remove } from 'fs-extra'
import { generateImageFilename, processImage } from '@server/helpers/image-utils'
import { THUMBNAILS_SIZE } from '@server/initializers/constants'
import { initDatabaseModels } from '@server/initializers/database'
import { VideoModel } from '@server/models/video/video'
program
  .description('Regenerate local thumbnails using preview files')
  .parse(process.argv)

run()
  .then(() => process.exit(0))
  .catch(err => console.error(err))

// Regenerate the miniature of every local video, 20 at a time.
// Per-video failures are logged and do not stop the batch.
async function run () {
  await initDatabaseModels(true)

  const ids = await VideoModel.listLocalIds()

  await map(ids, id => {
    return processVideo(id)
      .catch(err => console.error('Cannot process video %d.', id, err))
  }, { concurrency: 20 })
}
// Rebuild the miniature of one video by downscaling its preview image,
// then delete the previous miniature file
async function processVideo (id: number) {
  const video = await VideoModel.loadWithFiles(id)

  console.log('Processing video %s.', video.name)

  // NOTE(review): assumes every local video has both a miniature and a
  // preview; a missing one would throw a TypeError below — TODO confirm
  const thumbnail = video.getMiniature()
  const preview = video.getPreview()

  const previewPath = preview.getPath()

  if (!await pathExists(previewPath)) {
    throw new Error(`Preview ${previewPath} does not exist on disk`)
  }

  const size = {
    width: THUMBNAILS_SIZE.width,
    height: THUMBNAILS_SIZE.height
  }

  const oldPath = thumbnail.getPath()

  // Update thumbnail
  thumbnail.filename = generateImageFilename()
  thumbnail.width = size.width
  thumbnail.height = size.height

  const thumbnailPath = thumbnail.getPath()
  await processImage({ path: previewPath, destination: thumbnailPath, newSize: size, keepOriginal: true })

  // Save new attributes
  await thumbnail.save()

  // Remove old thumbnail
  await remove(oldPath)

  // Don't federate, remote instances will refresh the thumbnails after a while
}

View File

@@ -69,11 +69,22 @@ npm run build -- --source-map
rm -f "./client/dist/en-US/stats.json"
rm -f "./client/dist/embed-stats.json"
# Clean up declaration files
find dist/ packages/core-utils/dist/ \
packages/ffmpeg/dist/ \
packages/node-utils/dist/ \
packages/models/dist/ \
\( -name '*.d.ts' -o -name '*.d.ts.map' \) -type f -delete
# Creating the archives
(
# local variables
directories_to_archive=("$directory_name/CREDITS.md" "$directory_name/FAQ.md" \
"$directory_name/LICENSE" "$directory_name/README.md" \
"$directory_name/packages/core-utils/dist/" "$directory_name/packages/core-utils/package.json" \
"$directory_name/packages/ffmpeg/dist/" "$directory_name/packages/ffmpeg/package.json" \
"$directory_name/packages/node-utils/dist/" "$directory_name/packages/node-utils/package.json" \
"$directory_name/packages/models/dist/" "$directory_name/packages/models/package.json" \
"$directory_name/client/dist/" "$directory_name/client/yarn.lock" \
"$directory_name/client/package.json" "$directory_name/config" \
"$directory_name/dist" "$directory_name/package.json" \
@@ -124,7 +135,7 @@ rm -f "./client/dist/embed-stats.json"
# Release types package
npm run generate-types-package "$version"
cd packages/types/dist
cd packages/types-generator/dist
npm publish --access public
fi
)

View File

@@ -1,58 +0,0 @@
import { program } from 'commander'
import { isUserPasswordValid } from '../server/helpers/custom-validators/users'
import { initDatabaseModels } from '../server/initializers/database'
import { UserModel } from '../server/models/user/user'
// CLI to reset the password of a local user: prompts on stdin (echo is
// suppressed) and saves the new password if it passes validation
program
  .option('-u, --user [user]', 'User')
  .parse(process.argv)

const options = program.opts()

if (options.user === undefined) {
  console.error('All parameters are mandatory.')
  process.exit(-1)
}

initDatabaseModels(true)
  .then(() => {
    return UserModel.loadByUsername(options.user)
  })
  .then(user => {
    if (!user) {
      console.error('Unknown user.')
      process.exit(-1)
    }

    const readline = require('readline')
    const Writable = require('stream').Writable
    // Sink stream: swallows readline's echo so the typed password stays hidden
    const mutableStdout = new Writable({
      write: function (_chunk, _encoding, callback) {
        callback()
      }
    })

    const rl = readline.createInterface({
      input: process.stdin,
      output: mutableStdout,
      terminal: true
    })

    console.log('New password?')
    rl.on('line', function (password) {
      if (!isUserPasswordValid(password)) {
        console.error('New password is invalid.')
        process.exit(-1)
      }

      // The model hashes the password in its save hook before persisting
      user.password = password

      user.save()
        .then(() => console.log('User password updated.'))
        .catch(err => console.error(err))
        .finally(() => process.exit(0))
    })
  })
  .catch(err => {
    console.error(err)
    process.exit(-1)
  })

View File

@@ -1,17 +0,0 @@
#!/bin/sh

# Build the PeerTube CLI tools into dist/server/tools/

set -eu

# Install server dependencies, skipping the client build
NOCLIENT=1 yarn install --pure-lockfile

# Start from a clean output directory
rm -rf ./dist/server/tools/

(
  cd ./server/tools
  yarn install --pure-lockfile
  # Compile the tools project with the repository's TypeScript version
  ../../node_modules/.bin/tsc --build --verbose
)

# The compiled tools need their runtime dependencies next to them
cp -r "./server/tools/node_modules" "./dist/server/tools"

# Rewrite TS path aliases to relative paths in the emitted JS
npm run resolve-tspaths:server

View File

@@ -1,5 +1,5 @@
import Bluebird from 'bluebird'
import { wait } from '@shared/core-utils'
import { wait } from '@peertube/peertube-core-utils'
import {
createSingleServer,
doubleFollow,
@@ -7,7 +7,7 @@ import {
PeerTubeServer,
setAccessTokensToServers,
waitJobs
} from '@shared/server-commands'
} from '@peertube/peertube-server-commands'
let servers: PeerTubeServer[]
const viewers: { xForwardedFor: string }[] = []

View File

@@ -4,7 +4,10 @@
"outDir": "../dist/scripts"
},
"references": [
{ "path": "../shared" },
{ "path": "../packages/core-utils" },
{ "path": "../packages/models" },
{ "path": "../packages/node-utils" },
{ "path": "../packages/server-commands" },
{ "path": "../server" }
]
}

View File

@@ -1,140 +0,0 @@
import { updateTorrentMetadata } from '@server/helpers/webtorrent'
import { getServerActor } from '@server/models/application/application'
import { WEBSERVER } from '../server/initializers/constants'
import { initDatabaseModels } from '../server/initializers/database'
import {
getLocalAccountActivityPubUrl,
getLocalVideoActivityPubUrl,
getLocalVideoAnnounceActivityPubUrl,
getLocalVideoChannelActivityPubUrl,
getLocalVideoCommentActivityPubUrl
} from '../server/lib/activitypub/url'
import { AccountModel } from '../server/models/account/account'
import { ActorModel } from '../server/models/actor/actor'
import { ActorFollowModel } from '../server/models/actor/actor-follow'
import { VideoModel } from '../server/models/video/video'
import { VideoChannelModel } from '../server/models/video/video-channel'
import { VideoCommentModel } from '../server/models/video/video-comment'
import { VideoShareModel } from '../server/models/video/video-share'
// Rewrite every locally-owned ActivityPub URL (actors, shares, comments,
// videos, torrents) after the instance hostname changed.
// PeerTube must be stopped while this script runs.
run()
  .then(() => process.exit(0))
  .catch(err => {
    console.error(err)
    process.exit(-1)
  })

async function run () {
  await initDatabaseModels(true)

  const serverAccount = await getServerActor()

  {
    // Refuse to run while following other servers: their stored follow URLs
    // would still point to the old host
    const res = await ActorFollowModel.listAcceptedFollowingUrlsForApi([ serverAccount.id ], undefined)
    const hasFollowing = res.total > 0

    if (hasFollowing === true) {
      throw new Error('Cannot update host because you follow other servers!')
    }
  }

  console.log('Updating actors.')

  const actors: ActorModel[] = await ActorModel.unscoped().findAll({
    include: [
      {
        model: VideoChannelModel.unscoped(),
        required: false
      },
      {
        model: AccountModel.unscoped(),
        required: false
      }
    ]
  })
  for (const actor of actors) {
    // Remote actors keep their original URLs
    if (actor.isOwned() === false) continue

    console.log('Updating actor ' + actor.url)

    // Accounts and channels have different AP URL layouts
    const newUrl = actor.Account
      ? getLocalAccountActivityPubUrl(actor.preferredUsername)
      : getLocalVideoChannelActivityPubUrl(actor.preferredUsername)

    actor.url = newUrl
    actor.inboxUrl = newUrl + '/inbox'
    actor.outboxUrl = newUrl + '/outbox'
    actor.sharedInboxUrl = WEBSERVER.URL + '/inbox'
    actor.followersUrl = newUrl + '/followers'
    actor.followingUrl = newUrl + '/following'

    await actor.save()
  }

  console.log('Updating video shares.')

  const videoShares: VideoShareModel[] = await VideoShareModel.findAll({
    include: [ VideoModel.unscoped(), ActorModel.unscoped() ]
  })
  for (const videoShare of videoShares) {
    if (videoShare.Video.isOwned() === false) continue

    console.log('Updating video share ' + videoShare.url)

    videoShare.url = getLocalVideoAnnounceActivityPubUrl(videoShare.Actor, videoShare.Video)
    await videoShare.save()
  }

  console.log('Updating video comments.')
  const videoComments: VideoCommentModel[] = await VideoCommentModel.findAll({
    include: [
      {
        model: VideoModel.unscoped()
      },
      {
        model: AccountModel.unscoped(),
        include: [
          {
            model: ActorModel.unscoped()
          }
        ]
      }
    ]
  })
  for (const comment of videoComments) {
    if (comment.isOwned() === false) continue

    console.log('Updating comment ' + comment.url)

    comment.url = getLocalVideoCommentActivityPubUrl(comment.Video, comment)
    await comment.save()
  }

  console.log('Updating video and torrent files.')

  const ids = await VideoModel.listLocalIds()
  for (const id of ids) {
    const video = await VideoModel.loadFull(id)

    console.log('Updating video ' + video.uuid)

    video.url = getLocalVideoActivityPubUrl(video)
    await video.save()

    for (const file of video.VideoFiles) {
      console.log('Updating torrent file %s of video %s.', file.resolution, video.uuid)
      // Torrents embed the web seed URL, so they must be regenerated too
      await updateTorrentMetadata(video, file)

      await file.save()
    }

    const playlist = video.getHLSPlaylist()
    for (const file of (playlist?.VideoFiles || [])) {
      console.log('Updating fragmented torrent file %s of video %s.', file.resolution, video.uuid)

      await updateTorrentMetadata(playlist, file)

      await file.save()
    }
  }
}

View File

@@ -1,108 +0,0 @@
#!/bin/sh

# Upgrade an existing PeerTube installation in place.
# Usage: ./upgrade.sh [PEERTUBE_PATH]   (default: /var/www/peertube)

set -eu

PEERTUBE_PATH=${1:-/var/www/peertube}

if [ ! -e "$PEERTUBE_PATH" ]; then
  echo "Error - path \"$PEERTUBE_PATH\" wasn't found"
  echo ""
  echo "If peertube was installed in another path, you can specify it with"
  echo "    ./upgrade.sh <PATH>"
  exit 1
fi

# Sanity check: the path must look like an actual PeerTube installation
if [ ! -e "$PEERTUBE_PATH/versions" -o ! -e "$PEERTUBE_PATH/config/production.yaml" ]; then
  echo "Error - Couldn't find peertube installation in \"$PEERTUBE_PATH\""
  echo ""
  echo "If peertube was installed in another path, you can specify it with"
  echo "    ./upgrade.sh <PATH>"
  exit 1
fi

# Require at least 1 GB of free disk space before downloading a new release
if [ -x "$(command -v awk)" ] && [ -x "$(command -v sed)" ]; then
    REMAINING=$(df -k $PEERTUBE_PATH | awk '{ print $4}' | sed -n 2p)
    ONE_GB=$((1024 * 1024))

    if [ "$REMAINING" -lt "$ONE_GB" ]; then
      echo "Error - not enough free space for upgrading"
      echo ""
      echo "Make sure you have at least 1 GB of free space in $PEERTUBE_PATH"
      exit 1
    fi
fi

# Backup database
if [ -x "$(command -v pg_dump)" ]; then
  mkdir -p $PEERTUBE_PATH/backup

  SQL_BACKUP_PATH="$PEERTUBE_PATH/backup/sql-peertube_prod-$(date +"%Y%m%d-%H%M").bak"
  echo "Backing up PostgreSQL database in $SQL_BACKUP_PATH"

  # Read the connection settings from the production configuration
  DB_USER=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['username'])")
  DB_PASS=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['password'])")
  DB_HOST=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['hostname'])")
  DB_PORT=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['port'])")
  DB_SUFFIX=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['suffix'])")
  DB_NAME=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['name'] || '')")

  PGPASSWORD=$DB_PASS pg_dump -U $DB_USER -p $DB_PORT -h $DB_HOST -F c "${DB_NAME:-peertube${DB_SUFFIX}}" -f "$SQL_BACKUP_PATH"
else
  echo "pg_dump not found. Cannot make a SQL backup!"
fi

# If there is a pre-release, give the user a choice which one to install.
RELEASE_VERSION=$(curl -s https://api.github.com/repos/chocobozzz/peertube/releases/latest | grep tag_name | cut -d '"' -f 4)
PRE_RELEASE_VERSION=$(curl -s https://api.github.com/repos/chocobozzz/peertube/releases | grep tag_name | head -1 | cut -d '"' -f 4)

if [ "$RELEASE_VERSION" != "$PRE_RELEASE_VERSION" ]; then
  echo -e "Which version do you want to install?\n[1] $RELEASE_VERSION (stable) \n[2] $PRE_RELEASE_VERSION (pre-release)"
  read choice

  case $choice in
      [1]* ) VERSION="$RELEASE_VERSION";;
      [2]* ) VERSION="$PRE_RELEASE_VERSION";;
      * ) exit;
  esac
else
  VERSION="$RELEASE_VERSION"
fi

echo "Installing Peertube version $VERSION"

# Download and unpack the release into the versions directory
wget -q "https://github.com/Chocobozzz/PeerTube/releases/download/${VERSION}/peertube-${VERSION}.zip" -O "$PEERTUBE_PATH/versions/peertube-${VERSION}.zip"
cd $PEERTUBE_PATH/versions
unzip -o "peertube-${VERSION}.zip"
rm -f "peertube-${VERSION}.zip"

RELEASE_PAGE_URL="https://github.com/Chocobozzz/PeerTube/releases/tag/${VERSION}"
LATEST_VERSION_DIRECTORY="$PEERTUBE_PATH/versions/peertube-${VERSION}"
cd "$LATEST_VERSION_DIRECTORY"

# Launch yarn to check if we have all required dependencies
NOCLIENT=1 yarn install --production --pure-lockfile

# Switch to latest code version
rm -rf $PEERTUBE_PATH/peertube-latest
ln -s "$LATEST_VERSION_DIRECTORY" $PEERTUBE_PATH/peertube-latest
cp $PEERTUBE_PATH/peertube-latest/config/default.yaml $PEERTUBE_PATH/config/default.yaml

echo ""
echo "=========================================================="
echo ""

# Generate a merged production.yaml.new for the admin to review, never
# overwrite the existing production.yaml automatically
if [ -x "$(command -v git)" ]; then
  cd /var/www/peertube

  git merge-file -p config/production.yaml "$LATEST_VERSION_DIRECTORY/config/production.yaml.example" "peertube-latest/config/production.yaml.example" | tee "config/production.yaml.new" > /dev/null

  echo "/var/www/peertube/config/production.yaml.new generated"
  echo "You can review it and replace your existing production.yaml configuration"
else
  echo "git command not found: unable to generate config/production.yaml.new configuration file based on your existing production.yaml configuration"
fi

echo ""
echo "=========================================================="
echo ""
echo "Please read the IMPORTANT NOTES on $RELEASE_PAGE_URL"
echo ""
echo "Then restart PeerTube!"