/**
 * Chunked file uploader for Sploder-Launcher builds
 * Uploads build artifacts in 90MB chunks with API key authentication
 */
// Node core modules for file access, hashing, and HTTP(S) transport.
const fs = require('fs');
const path = require('path');
const https = require('https');
const http = require('http');
const { createHash } = require('crypto');

// Upload configuration: 90MB chunks, credentials taken from the environment.
const CHUNK_SIZE = 90 * 1024 * 1024;
const API_KEY = process.env.UPLOAD_API_KEY;
const UPLOAD_URL = process.env.UPLOAD_URL;
1610
// Fail fast when required configuration is missing.
if (!API_KEY || !UPLOAD_URL) {
  console.error('Missing required environment variables: UPLOAD_API_KEY, UPLOAD_URL');
  process.exit(1);
}

const UPLOAD_ENDPOINT = UPLOAD_URL + '/update/upload.php';

// Optional --dir=<path> CLI argument; defaults to ./dist.
const uploadDir = process.argv.find(arg => arg.startsWith('--dir='))?.split('=')[1] || './dist';
/**
 * Calculate the MD5 hash of a file by streaming it from disk,
 * so arbitrarily large artifacts never need to fit in memory.
 * @param {string} filePath - Path of the file to hash.
 * @returns {Promise<string>} Hex-encoded MD5 digest.
 */
function calculateFileHash(filePath) {
  return new Promise((resolve, reject) => {
    const hash = createHash('md5');
    const stream = fs.createReadStream(filePath);
    stream.on('data', data => hash.update(data));
    stream.on('end', () => resolve(hash.digest('hex')));
    stream.on('error', reject);
  });
}
/**
 * Perform an HTTP(S) request and resolve with the status and parsed body.
 *
 * NOTE(review): the interior of this function was elided in the diff view
 * (`@@` hunk); the request-building and response-collection section has been
 * reconstructed from the visible contract ({status, data} with JSON body,
 * both `http` and `https` required at top of file) — verify against the
 * original before relying on it.
 *
 * @param {string} url - Absolute request URL.
 * @param {{method?: string, headers?: object}} options - Request options.
 * @param {Buffer|string} [data] - Optional request body.
 * @returns {Promise<{status: number, data: any}>} Status code and body
 *   (parsed as JSON when possible, raw text otherwise).
 */
function makeRequest(url, options, data) {
  return new Promise((resolve, reject) => {
    const urlObj = new URL(url);
    // Pick the transport module that matches the URL scheme.
    const transport = urlObj.protocol === 'https:' ? https : http;

    const req = transport.request({
      hostname: urlObj.hostname,
      port: urlObj.port,
      path: urlObj.pathname + urlObj.search,
      method: options.method || 'GET',
      headers: options.headers || {}
    }, res => {
      let body = '';
      res.on('data', chunk => { body += chunk; });
      res.on('end', () => {
        // Fall back to the raw body when the server does not return JSON.
        try {
          resolve({ status: res.statusCode, data: JSON.parse(body) });
        } catch {
          resolve({ status: res.statusCode, data: body });
        }
      });
    });

    req.on('error', reject);
    if (data) req.write(data);
    req.end();
  });
}
/**
 * Upload a single chunk as multipart/form-data with a raw binary payload.
 *
 * NOTE(review): part of this function was elided in the diff view (`@@` hunk);
 * the footer/concat/request section is reconstructed from the visible contract
 * (header Buffer built above, `}, formData);` visible after the gap) — verify
 * against the original before relying on it.
 *
 * @param {string} fileName - Name the server stores the file under.
 * @param {number} chunkIndex - Zero-based position of this chunk.
 * @param {Buffer} chunkData - Raw chunk bytes.
 * @param {number} totalChunks - Total chunk count for this file.
 * @param {string} fileHash - MD5 of the whole file, for server-side verification.
 * @returns {Promise<object>} Parsed server response on success.
 * @throws {Error} On a non-200 status or a `success: false` response body.
 */
async function uploadChunk(fileName, chunkIndex, chunkData, totalChunks, fileHash) {
  const boundary = '----WebKitFormBoundary' + Math.random().toString(36).substring(2);

  // Text fields first; each multipart field is: boundary, disposition, blank, value.
  const parts = [
    `--${boundary}`,
    'Content-Disposition: form-data; name="api_key"', '',
    API_KEY,
    `--${boundary}`,
    'Content-Disposition: form-data; name="file_name"', '',
    fileName,
    `--${boundary}`,
    'Content-Disposition: form-data; name="chunk_index"', '',
    chunkIndex.toString(),
    `--${boundary}`,
    'Content-Disposition: form-data; name="total_chunks"', '',
    totalChunks.toString(),
    `--${boundary}`,
    'Content-Disposition: form-data; name="file_hash"', '',
    fileHash,
    `--${boundary}`,
    `Content-Disposition: form-data; name="chunk_data"; filename="chunk_${chunkIndex}"`,
    'Content-Type: application/octet-stream', ''
  ];

  const header = Buffer.from(parts.join('\r\n') + '\r\n');
  const footer = Buffer.from(`\r\n--${boundary}--\r\n`);
  // Binary chunk goes between the text header and the closing boundary.
  const formData = Buffer.concat([header, chunkData, footer]);

  const response = await makeRequest(UPLOAD_ENDPOINT, {
    method: 'POST',
    headers: {
      'Content-Type': `multipart/form-data; boundary=${boundary}`,
      'Content-Length': formData.length
    }
  }, formData);

  if (response.status !== 200 || !response.data.success) {
    throw new Error(response.data.message || `HTTP ${response.status}`);
  }

  return response.data;
}
/**
 * Upload one file to the server in CHUNK_SIZE pieces, sequentially.
 * @param {string} filePath - Path of the artifact to upload.
 * @returns {Promise<void>}
 * @throws Propagates any chunk-upload failure to the caller.
 */
async function uploadFile(filePath) {
  const fileName = path.basename(filePath);
  const fileSize = fs.statSync(filePath).size;
  const totalChunks = Math.ceil(fileSize / CHUNK_SIZE);

  console.log(`Uploading: ${fileName} (${(fileSize / 1024 / 1024).toFixed(2)} MB, ${totalChunks} chunks)`);

  // Whole-file MD5 lets the server verify the reassembled upload.
  const fileHash = await calculateFileHash(filePath);

  // highWaterMark makes each read yield at most CHUNK_SIZE bytes per chunk.
  const fileStream = fs.createReadStream(filePath, { highWaterMark: CHUNK_SIZE });
  let chunkIndex = 0;

  for await (const chunk of fileStream) {
    const progress = ((chunkIndex + 1) / totalChunks * 100).toFixed(1);
    console.log(`Chunk ${chunkIndex + 1}/${totalChunks} (${progress}%)`);
    await uploadChunk(fileName, chunkIndex, chunk, totalChunks, fileHash);
    chunkIndex++;
  }

  console.log(`Completed: ${fileName}`);
}
/**
 * Scan a directory (non-recursively) for build artifacts to upload.
 * Recognized artifacts:
 *   - "*Setup*.exe"       -> windows-installer
 *   - "*Portable*.zip"    -> windows-portable
 *   - "Sploder-macOS.zip" -> macos-app
 * Subdirectories are ignored; the build script is expected to produce
 * Sploder-macOS.zip directly.
 * @param {string} directory - Directory to scan.
 * @returns {{path: string, type: string}[]} Matches; empty when the
 *   directory is missing or contains nothing recognized.
 */
function findFilesToUpload(directory) {
  const files = [];

  if (!fs.existsSync(directory)) {
    console.error(`Directory not found: ${directory}`);
    return files;
  }

  const items = fs.readdirSync(directory, { withFileTypes: true });

  for (const item of items) {
    if (item.isFile()) {
      const fullPath = path.join(directory, item.name);

      if (item.name.endsWith('.exe') && item.name.includes('Setup')) {
        files.push({ path: fullPath, type: 'windows-installer' });
      } else if (item.name.endsWith('.zip') && item.name.includes('Portable')) {
        files.push({ path: fullPath, type: 'windows-portable' });
      } else if (item.name === 'Sploder-macOS.zip') {
        files.push({ path: fullPath, type: 'macos-app' });
      }
    }
  }

  return files;
}
/**
 * Entry point: find artifacts in uploadDir and upload each one sequentially.
 * Exits the process with code 1 on any failure.
 * @returns {Promise<void>}
 */
async function main() {
  try {
    const files = findFilesToUpload(uploadDir);

    if (files.length === 0) {
      console.log('No files found to upload');
      return;
    }

    console.log(`Found ${files.length} files:`);
    files.forEach(file => console.log(`- ${path.basename(file.path)} (${file.type})`));

    // Sequential uploads keep server-side chunk assembly simple.
    for (const file of files) {
      await uploadFile(file.path);
    }

    console.log('All files uploaded successfully');
  } catch (error) {
    console.error(`Upload failed: ${error.message}`);
    process.exit(1);
  }
}
// Run the uploader.
main();