Compare commits
10 Commits: rename_to_ ... main

e0cd0f0d7a, f222c95e8a, e0f7805599, ba4f8b9c2c, 4ea93f4c73, a066b73f2d, 5e05567309, b59ac53e4b, 7f1dc01247, 0c37362a8a
@@ -1,5 +1,9 @@
 ## [Unreleased]

+## [0.2.0] - 2025-01-21
+
+- Rename to from Picop to Picopackage
+
 ## [0.1.0] - 2025-01-19

 - Initial release
Gemfile (2 changed lines)
@@ -10,3 +10,5 @@ gem "rake", "~> 13.0"
 gem "minitest", "~> 5.16"

 gem "rubocop", "~> 1.21"
+
+gem "standard", "~> 1.44"
Gemfile.lock (31 changed lines)
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    picop (0.1.0)
+    picopackage (0.2.1)
       digest
       open-uri (~> 0.5)
       yaml (~> 0.4)
@@ -14,22 +14,27 @@ GEM
     debug (1.10.0)
       irb (~> 1.10)
       reline (>= 0.3.8)
-    digest (3.1.1)
+    digest (3.2.0)
     io-console (0.8.0)
-    irb (1.14.3)
+    irb (1.15.1)
+      pp (>= 0.6.0)
       rdoc (>= 4.0.0)
       reline (>= 0.4.2)
     json (2.9.1)
     language_server-protocol (3.17.0.3)
+    lint_roller (1.1.0)
     minitest (5.25.4)
     open-uri (0.5.0)
       stringio
       time
       uri
     parallel (1.26.3)
-    parser (3.3.6.0)
+    parser (3.3.7.0)
       ast (~> 2.4.1)
       racc
+    pp (0.6.2)
+      prettyprint
+    prettyprint (0.2.0)
     psych (5.2.3)
       date
       stringio
@@ -53,7 +58,22 @@ GEM
       unicode-display_width (>= 2.4.0, < 4.0)
     rubocop-ast (1.37.0)
       parser (>= 3.3.1.0)
+    rubocop-performance (1.23.1)
+      rubocop (>= 1.48.1, < 2.0)
+      rubocop-ast (>= 1.31.1, < 2.0)
     ruby-progressbar (1.13.0)
+    standard (1.44.0)
+      language_server-protocol (~> 3.17.0.2)
+      lint_roller (~> 1.0)
+      rubocop (~> 1.70.0)
+      standard-custom (~> 1.0.0)
+      standard-performance (~> 1.6)
+    standard-custom (1.0.2)
+      lint_roller (~> 1.0)
+      rubocop (~> 1.50)
+    standard-performance (1.6.0)
+      lint_roller (~> 1.1)
+      rubocop-performance (~> 1.23.0)
     stringio (3.1.2)
     time (0.4.1)
       date
@@ -70,9 +90,10 @@ PLATFORMS
 DEPENDENCIES
   debug
   minitest (~> 5.16)
-  picop!
+  picopackage!
   rake (~> 13.0)
   rubocop (~> 1.21)
+  standard (~> 1.44)

 BUNDLED WITH
    2.6.2
README.md (20 changed lines)
@@ -1,28 +1,16 @@
-# Picop
+# Picopackage

-TODO: Delete this and the text below, and describe your gem
+A command line tool for installing and managing [Picopackages](https://picopackage.org).

-Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/picop`. To experiment with that code, run `bin/console` for an interactive prompt.
-
 ## Installation

-TODO: Replace `UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG` with your gem name right after releasing it to RubyGems.org. Please do not do it earlier due to security reasons. Alternatively, replace this section with instructions to install your gem from git if you don't plan to release to RubyGems.org.
-
-Install the gem and add to the application's Gemfile by executing:
-
 ```bash
-bundle add UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG
+gem install picopackage
-```
-
-If bundler is not being used to manage dependencies, install the gem by executing:
-
-```bash
-gem install UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG
 ```

 ## Usage

-TODO: Write usage instructions here
+`picopackage install <url|filepath>`

 ## Development

Rakefile (7 changed lines)
@@ -5,8 +5,9 @@ require "minitest/test_task"

 Minitest::TestTask.create

-require "rubocop/rake_task"
+# require "rubocop/rake_task"
+# RuboCop::RakeTask.new

-RuboCop::RakeTask.new
+require "standard/rake"

-task default: %i[test rubocop]
+task default: %i[test standard]
@@ -2,7 +2,7 @@
 # frozen_string_literal: true

 require "bundler/setup"
-require "picop"
+require "picopackage"

 # You can add fixtures and/or initialization code here to make experimenting
 # with your gem easier. You can also use a different console, if you like.
exe/ppkg (6 changed lines)
@@ -1,15 +1,15 @@
 #!/usr/bin/env ruby

 # Add lib directory to load path
-lib_path = File.expand_path('../lib', __dir__)
+lib_path = File.expand_path("../lib", __dir__)
 $LOAD_PATH.unshift(lib_path) unless $LOAD_PATH.include?(lib_path)

-require 'picopackage'
+require "picopackage"

 begin
   Picopackage::CLI.run(ARGV)
 rescue => e
   warn "Error: #{e.message}"
-  warn e.backtrace if ENV['DEBUG']
+  warn e.backtrace if ENV["DEBUG"]
   exit 1
 end
lib/picopackage.rb (new file, 18 lines)
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require_relative "picopackage/version"
+require_relative "picopackage/fetch"
+require_relative "picopackage/provider"
+require_relative "picopackage/package"
+require_relative "picopackage/scanner"
+require_relative "picopackage/cli"
+
+module Picopackage
+  class Error < StandardError; end
+
+  class FileTooLargeError < StandardError; end
+
+  class FetchError < StandardError; end
+
+  class LocalModificationError < StandardError; end
+end
@@ -5,71 +5,71 @@ module Picopackage
     def self.run(argv = ARGV)
       command = argv.shift
       case command
-      when 'scan'
+      when "scan"
        options = {}
        OptionParser.new do |opts|
          opts.banner = "Usage: ppkg scan [options] DIRECTORY"
          # opts.on('-v', '--verbose', 'Run verbosely') { |v| options[:verbose] = v }
        end.parse!(argv)

-        dir = argv.first || '.'
+        dir = argv.first || "."
        Picopackage::Scanner.scan(dir).each { |f| puts f.file_path }

-      when 'sign'
+      when "init"
        OptionParser.new do |opts|
-          opts.banner = "Usage: ppkg sign FILE"
+          opts.banner = "Usage: ppkg init FILE"
        end.parse!(argv)

        file = argv.first
-        Picopackage::SourceFile.from_file(file).sign
+        Picopackage::Package.from_file(file).init_metadata

-      when 'checksum'
+      when "checksum"
        OptionParser.new do |opts|
          opts.banner = "Usage: ppkg checksum FILE"
        end.parse!(argv)
        file = argv.first
-        puts Picopackage::SourceFile.from_file(file).checksum
+        puts Picopackage::Package.from_file(file).checksum

-      when 'verify'
+      when "verify"
        OptionParser.new do |opts|
          opts.banner = "Usage: ppkg sign FILE"
        end.parse!(argv)

        path = argv.first
-        source = SourceFile.from_file(path)
+        source = Package.from_file(path)

-        if source.metadata['content_checksum'].nil?
+        if source.metadata["content_checksum"].nil?
          puts "⚠️ No checksum found in #{path}"
          puts "Run 'ppkg sign #{path}' to add one"
          exit 1
        end

-        unless source.verify
+        unless source.verify_payload
          puts "❌ Checksum verification failed for #{path}"
-          puts "Expected: #{source.metadata['content_checksum']}"
+          puts "Expected: #{source.metadata["content_checksum"]}"
          puts "Got: #{source.checksum}"
          exit 1
        end

        puts "✅ #{path} verified successfully"

-      when 'inspect'
+      when "inspect"
        OptionParser.new do |opts|
          opts.banner = "Usage: ppkg inspect FILE|DIRECTORY"
        end.parse!(argv)

        path = argv.first
-        Picopackage::SourceFile.from_file(path).inspect
+        Picopackage::Package.from_file(path).inspect_metadata

-      when 'fetch'
+      when "fetch"
        options = {force: false}
        OptionParser.new do |opts|
          opts.banner = "Usage: ppkg fetch [options] URI [PATH]"
-          opts.on('-f', '--force', 'Force fetch') { |f| options[:force] = f }
+          opts.on("-f", "--force", "Force fetch") { |f| options[:force] = f }
        end.parse!(argv)

        url = argv.shift
-        path = argv.shift || '.' # use '.' if no path provided
+        path = argv.shift || "." # use '.' if no path provided

        if url.nil?
          puts "Error: URI is required"
@@ -77,19 +77,35 @@ module Picopackage
        end

        begin
-          source_file = Fetch.fetch(url, path, force: options[:force])
+          Fetch.fetch(url, path, force: options[:force])
+        rescue LocalModificationError => e
+          puts "Error: #{e.message}"
+        rescue => e
+          puts "Error: #{e.message}"
+          exit 1
+        end
+
+      when "update"
+        options = {force: false}
+        OptionParser.new do |opts|
+          opts.banner = "Usage: ppkg update [options] FILE"
+          opts.on("-f", "--force", "Force update") { |f| options[:force] = f }
+        end.parse!(argv)
+
+        file = argv.first
+        package = Package.from_file(file)
+        begin
+          Fetch.fetch(package.url, File.dirname(file), force: options[:force])
        rescue LocalModificationError => e
          puts "Error: #{e.message}"
        rescue => e
          puts "Error: #{e.message}"
          exit 1
-          # Optionally retry with force
-          # source_file = Fetch.fetch(url, destination, force: true)
        end

      else
        puts "Unknown command: #{command}"
-        puts "Available commands: scan, sign, inspect, update"
+        puts "Available commands: fetch, update, scan, sign, inspect"
        exit 1
      end
    rescue OptionParser::InvalidOption => e
@@ -97,8 +113,28 @@ module Picopackage
      exit 1
    rescue => e
      puts "Error: #{e.message}"
-      puts e.backtrace if ENV['DEBUG']
+      puts e.backtrace if ENV["DEBUG"]
      exit 1
    end
+
+    def self.determine_script_source
+      # Get the full path of the currently executing script
+      current_path = File.expand_path($0)
+
+      # Check if script is in GEM_PATH
+      gem_paths = Gem.path.map { |p| File.expand_path(p) }
+
+      is_gem = gem_paths.any? { |path| current_path.start_with?(path) }
+
+      if is_gem
+        # Running from gem installation
+        gem_name = File.basename(File.dirname(File.dirname(current_path)))
+        version = File.basename(File.dirname(current_path))
+        {source: :gem, path: current_path, gem_name: gem_name, version: version}
+      else
+        # Running from local installation
+        {source: :local, path: current_path}
+      end
+    end
  end
 end
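For orientation, the command surface defined in the CLI diff above can be driven through `Picopackage::CLI.run`, which is exactly what `exe/ppkg` does. A minimal sketch, assuming the gem is installed and loadable; the gist URL and filename are hypothetical:

```ruby
require "picopackage"

# Equivalent to `ppkg fetch -f URI PATH`: fetch a package and overwrite any local copy.
Picopackage::CLI.run(["fetch", "-f", "https://gist.github.com/someuser/0123456789abcdef", "."])

# Equivalent to `ppkg update FILE`: re-fetch a package from the URL recorded in its metadata.
Picopackage::CLI.run(["update", "uniquify_array.rb"])
```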
@@ -1,96 +1,167 @@
-require 'net/http'
+require "net/http"
-require 'fileutils'
+require "fileutils"
-require 'tempfile'
+require "tempfile"
-require 'json'
+require "json"
-require 'debug'
+require "debug"

-module Picop
+module Picopackage
   class Fetch
+    class Error < StandardError; end
+    class HTTPError < Error; end
+    class FileTooLargeError < Error; end
+    class NotModifiedError < Error; end # Add this
+    class TooManyRedirectsError < Error; end # Add this
+
+    MAX_REDIRECTS = 5 # This constant is used but not defined
+
+    def initialize(max_size: 1024 * 1024, timeout: 10)
+      @max_size = max_size
+      @timeout = timeout
+    end
+
+    def fetch(uri)
+      case uri.scheme
+      when "http", "https" then fetch_http(uri)
+      when "file" then fetch_file(uri)
+      else
+        raise Error, "Unsupported scheme: #{uri.scheme}"
+      end
+    end
+
     def self.fetch(url, destination, force: false)
       raise ArgumentError, "Destination directory does not exist: #{destination}" unless Dir.exist?(destination)
-      debugger
       provider = Provider.for(url)
-      file_path = File.join(destination, provider.source_file.filename)
+      package = provider.package
+      file_path = File.join(destination, package.filename)
+
+      local_package = File.exist?(file_path) ? FileProvider.new(file_path).package : nil
+
+      resolver = Resolver.new(package, local_package, file_path, force: force).resolve
+
+      case resolver[:state]
+      when :kept, :updated
+        puts resolver[:message]
+      when :conflict
+        raise LocalModificationError, resolver[:message]
+      end
+      provider.package
+    end
+
+    private
+
+    def fetch_http(uri, etag = nil)
+      Net::HTTP.start(uri.host, uri.port, connection_options(uri)) do |http|
+        request = Net::HTTP::Get.new(uri.request_uri)
+        request["If-None-Match"] = etag if etag
+
+        response = http.request(request)
+        handle_response(response, uri)
+      end
+    end
+
+    def fetch_file(uri)
+      File.read(uri.path)
+    end
+
+    def connection_options(uri)
+      {
+        use_ssl: uri.scheme == "https",
+        read_timeout: @timeout,
+        open_timeout: @timeout
+      }
+    end
+
+    def handle_response(response, uri)
+      case response
+      when Net::HTTPSuccess
+        {
+          body: read_body(response),
+          etag: response["ETag"]
+        }
+      when Net::HTTPNotModified
+        raise NotModifiedError.new("Resource not modified", etag: response["ETag"])
+      when Net::HTTPRedirection
+        handle_redirect(response, uri)
+      else
+        raise HTTPError, "HTTP #{response.code}: #{response.message}"
+      end
+    end
+
+    def handle_redirect(response, uri, redirect_count = 0)
+      raise TooManyRedirectsError if redirect_count >= MAX_REDIRECTS
+      location = response["location"]
+      new_uri = URI(location)
+      # Handle both relative paths and full URLs
+      new_uri = uri.merge(location) if new_uri.relative?
+      fetch(new_uri, redirect_count: redirect_count + 1)
+    end
+
+    def read_body(response)
+      buffer = String.new(capacity: @max_size)
+      response.read_body do |chunk|
+        raise FileTooLargeError, "Response would exceed #{@max_size} bytes" if buffer.bytesize + chunk.bytesize > @max_size
+        buffer << chunk
+      end
+      buffer
+    end
+  end
+
+  ##
+  # States:
+  # - kept: local file was converted to a picopackage and kept
+  # - updated: local file was updated with remote picopackage
+  # - conflict: local and remote files differ - manually resolve or use -f to force
+  class Resolver
+    attr_reader :remote, :local, :local_path, :force
+    def initialize(remote_package, local_package, local_path, force: false)
+      @remote = remote_package
+      @local = local_package
+      @local_path = local_path
+      @force = force
+      @same_checksum = @remote.payload_checksum == @local&.payload_checksum
+    end
+
+    STATES = %i[kept updated conflict].freeze
+
+    def resolve
+      validate_state_hash(
+        if @force
+          @remote.save(local_path)
+          {state: :updated, message: "Force mode: overwrote local file with remote package"}
+        elsif @local.nil?
+          @remote.save(local_path)
+          {state: :kept, message: "Saved Package as new file"}
+        elsif @remote.payload_version != @local.payload_version
+          {state: :conflict, message: "Version conflict. Local: #{@local.payload_version}, Remote: #{@remote.payload_version}"}
+        elsif @remote.payload_timestamp_as_time > @local.payload_timestamp_as_time
+          @remote.save(local_path)
+          {state: :updated, message: "Updated to newer version"}
+        elsif !@same_checksum
+          handle_checksum_mismatch
+        elsif @local.was_bare_file
          debugger
-      if File.exist?(file_path) && force
+          @local.save(local_path)
-        provider.source_file.save(destination)
+          {state: :kept, message: "Packaged existing file as Picopackage"}
-      elsif File.exist?(file_path)
-        local_source_file = SourceFile.from_file(file_path)
-        status = Status.compare(local_source_file, provider.source_file)

-        if force
-          provider.source_file.save(destination)
-        elsif status.modified?
-          raise LocalModificationError, "#{status.message}. Use -f or --force to overwrite local version"
-        elsif status.outdated?
-          puts "Updated from #{local_source_file.version} to #{provider.source_file.version}"
-          provider.source_file.save(destination)
-        elsif status.up_to_date?
-          puts status.message
-        end

        else
-          provider.source_file.save(destination)
+          {state: :kept, message: "Local file is up to date"}
        end
-      provider.source_file
-    end
-  end

-  class Status
-    attr_reader :state, :local_version, :remote_version
-
-    def self.compare(local_source_file, remote_source_file)
-      return new(:outdated) if local_source_file.metadata.nil? || remote_source_file.metadata.nil?
-
-      local_version = local_source_file.metadata["version"]
-      remote_version = remote_source_file.metadata["version"]
-
-      if local_version == remote_version
-        if local_source_file.modified?
-          new(:modified, local_version:)
-        else
-          new(:up_to_date, local_version:)
-        end
-      else
-        new(:outdated,
-          local_version:,
-          remote_version:,
-          modified: local_source_file.modified?
      )
    end

+    private
+
+    def validate_state_hash(hash)
+      raise "Invalid state" unless STATES.include?(hash[:state])
+      raise "Missing message" unless hash[:message].is_a?(String)
+      hash
    end

-    def initialize(state, local_version: nil, remote_version: nil, modified: false)
+    def handle_checksum_mismatch
-      @state = state
+      if @force
-      @local_version = local_version
+        @remote.save(local_path) # In force mode, remote wins
-      @remote_version = remote_version
+        {state: :updated, message: "Overwrote local file with remote package"}
-      @modified = modified
-    end
-
-    def modified?
-      @modified || @state == :modified
-    end
-
-    def up_to_date?
-      @state == :up_to_date
-    end
-
-    def outdated?
-      @state == :outdated
-    end
-
-    def message
-      case state
-      when :up_to_date
-        "File is up to date"
-      when :outdated
-        if modified?
-          "Local file (v#{local_version}) has modifications but remote version (v#{remote_version}) is available"
      else
-          "Local file (v#{local_version}) is outdated. Remote version: v#{remote_version}"
+        {state: :conflict, message: "Files differ. Use --force to convert both to packages"}
      end
-      when :modified
-        "Local file has been modified from original version (v#{local_version})"
    end
  end
 end
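At the library level, the new `Fetch.fetch` entry point above resolves the remote package against any local copy and reports one of the Resolver states (kept, updated, conflict), raising `LocalModificationError` on a conflict. A rough usage sketch based on the diff, mirroring the CLI's fetch branch; the gist URL is hypothetical:

```ruby
require "picopackage"

begin
  # Fetch a remote picopackage into the current directory; returns the Package instance.
  package = Picopackage::Fetch.fetch("https://gist.github.com/someuser/0123456789abcdef", ".")
  puts "Fetched #{package.filename}"
rescue Picopackage::LocalModificationError => e
  # The Resolver reported :conflict; re-run with force: true to let the remote copy win.
  warn "Error: #{e.message}"
rescue Picopackage::FetchError => e
  warn "Error: #{e.message}"
end
```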
lib/picopackage/package.rb (new file, 128 lines)
@@ -0,0 +1,128 @@
+require "yaml"
+require "digest"
+require "forwardable"
+
+module Picopackage
+  METADATA_PATTERN = /^\n*#\s*@PICOPACKAGE_START\n(.*?)^\s*#\s*@PICOPACKAGE_END\s*$/m
+
+  class Metadata < Struct.new(:url, :filename, :payload_version, :payload_timestamp, :payload_checksum, :etag, keyword_init: true)
+    # the #from_file method will create a new instance of Metadata from a file path, rather than read a package's metadata
+    def self.from_file(file_path, content: nil)
+      new(content: File.read(file_path))
+    end
+
+    def self.from_url_response(url, response)
+    end
+
+    def self.from_content(content)
+      return new unless content =~ METADATA_PATTERN
+
+      yaml_content = $1.each_line.map { |line| line.sub(/^\s*#\s?/, "").rstrip }.join("\n")
+
+      # Load and transform in one chain
+      @metadata = new(**YAML.safe_load(yaml_content)
+        .slice(*Metadata.members.map(&:to_s))
+        .transform_keys(&:to_sym))
+    rescue
+      new # Return empty hash on any YAML/transformation errors
+    end
+
+    def empty? = to_h.values.all?(&:nil?)
+  end
+
+  class Payload
+    def self.from_content(content) = content.sub(METADATA_PATTERN, "")
+
+    def self.normalize(payload) = payload.rstrip + "\n\n"
+
+    def self.normalized_from_content(content) = Payload.from_content(content).then { Payload.normalize(_1) }
+
+    def self.from_file(file_path) = normalized_from_content(File.read(file_path))
+
+    def self.checksum(payload) = "sha256:#{Digest::SHA256.hexdigest(payload)}"
+
+    def self.checksum_from_content(content) = checksum(from_content(content))
+  end
+
+  class Package
+    extend Forwardable
+    attr_reader :content, :payload, :metadata, :was_bare_file
+
+    def_delegators :@metadata,
+      :url, :url=,
+      :filename, :filename=,
+      :payload_version, :payload_version=,
+      :payload_timestamp, :payload_timestamp=,
+      :payload_checksum, :payload_checksum=
+
+    def self.from_file(file_path)
+      if File.exist?(file_path)
+        new(content: File.read(file_path))
+      end
+    end
+
+    def initialize(content:)
+      @content = content
+      @payload = Payload.normalized_from_content(@content)
+      @metadata = Metadata.from_content(@content)
+
+      if is_bare_file?
+        @was_bare_file = true
+        init_metadata
+      else
+        @was_bare_file = false
+      end
+    end
+
+    def is_bare_file? = @metadata.empty?
+
+    def init_metadata
+      @metadata.url ||= url
+      @metadata.filename ||= filename
+      @metadata.payload_checksum ||= Payload.checksum_from_content(content)
+      @metadata.payload_timestamp ||= payload_timestamp
+    end
+
+    def save(path, filename = nil)
+      path = File.join(path, filename || @metadata.filename) if File.directory?(path)
+
+      File.write(path, generate_package)
+    end
+
+    def verify_payload
+      return false if metadata.payload_checksum.nil? || metadata.payload_checksum&.empty?
+      Payload.checksum(payload) == metadata.payload_checksum
+    end
+
+    def payload_timestamp_as_time
+      @metadata&.payload_timestamp ? Time.parse(@metadata.payload_timestamp) : nil
+    end
+
+    def modified? = !verify_payload
+
+    def inspect_metadata = puts JSON.pretty_generate(@metadata.to_h)
+
+    private
+
+    def generate_package
+      @metadata.url = url.to_s
+      metadata_block = generate_metadata
+      if METADATA_PATTERN.match?(content)
+        content.sub(METADATA_PATTERN, "\n#{metadata_block}")
+      else
+        [content.rstrip, "\n#{metadata_block}"].join("\n")
+      end
+    end
+
+    # This will need a comment style one day, to work with other languages
+    def generate_metadata
+      yaml_content = @metadata.to_h.transform_keys(&:to_s).to_yaml.strip
+      [
+        "# @PICOPACKAGE_START",
+        yaml_content.lines.map { |line| "# #{line}" }.join,
+        "# @PICOPACKAGE_END",
+        ""
+      ].join("\n")
+    end
+  end
+end
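Concretely, `Package#generate_metadata` above appends a comment-wrapped YAML block to the payload, and `METADATA_PATTERN` finds it again on the way back in. A small self-contained sketch of that round trip; the payload, URL, and field values are invented for illustration:

```ruby
require "yaml"

# Same pattern as METADATA_PATTERN in lib/picopackage/package.rb above.
PATTERN = /^\n*#\s*@PICOPACKAGE_START\n(.*?)^\s*#\s*@PICOPACKAGE_END\s*$/m

# A hypothetical packaged file: ordinary Ruby payload followed by the metadata block.
packaged = <<~RUBY
  def uniquify(array)
    array.uniq
  end

  # @PICOPACKAGE_START
  # url: https://example.com/uniquify_array.rb
  # filename: uniquify_array.rb
  # payload_version: 1.0.0
  # payload_timestamp: Tue, 21 Jan 2025 00:00:00 GMT
  # @PICOPACKAGE_END
RUBY

packaged =~ PATTERN
# Strip the leading "# " from each captured line, as Metadata.from_content does.
yaml = $1.each_line.map { |line| line.sub(/^\s*#\s?/, "").rstrip }.join("\n")
metadata = YAML.safe_load(yaml)       # hash of the metadata fields
payload = packaged.sub(PATTERN, "")   # payload with the metadata block removed, as Payload.from_content does
puts metadata["filename"]             # => "uniquify_array.rb"
puts payload
```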
@@ -1,4 +1,7 @@
-module Picop
+require "time"
+require "pathname"
+
+module Picopackage
   class Provider
     def self.for(url)
       PROVIDERS.each do |provider|
@@ -17,114 +20,119 @@ module Picop
   end

   # Base class for fetching content from a URL
-  # The variable `body` will contain the content retrieved from the URL
+  # The variable `body` will contain the package_data retrieved from the URL
-  # The variable `content` will contain both and code + metadata - this would be writen to a file.
+  # The variable `package_data` will contain both and payload + metadata - this would be writen to a file.
-  # The variable `code` will contain the code extracted from `content`
+  # The variable `payload` will contain the payload extracted from `package_data`
-  # The variable `metadata` will contain the metadata extracted from `content`
+  # The variable `metadata` will contain the metadata extracted from `package_data`

-  # Job of the Provider class is to fetch the body from the URL, and then extract the content and the filename from the body
+  # Job of the Provider class is to fetch the body from the URL, and then extract the package_data
-  # The SourceFile class will then take the body and split it into code and metadata
+  # and the filename from the body. The Package class will then take the body and split it into payload and metadata

   class DefaultProvider
     MAX_SIZE = 1024 * 1024
     TIMEOUT = 10
+    attr_reader :url, :package
-    attr_reader :url, :source_file

     def self.handles_url?(url) = :maybe

-    def initialize(url)
+    def initialize(url, fetcher: Fetch.new(max_size: MAX_SIZE, timeout: TIMEOUT))
       @url = transform_url(url)
-      @uri = URI(@url)
+      @fetcher = fetcher
-      @body = nil
+      @package = Package.new(content: content)
-      @content = nil
+      populate_metadata
     end

-    def transform_url(url) = url
+    def transform_url(url) = URI(url)

-    def body = @body ||= fetch
+    def body
+      @body ||= @fetcher.fetch(@url)
+    rescue Fetch::Error => e
+      raise FetchError, e.message
+    end
+
-    def fetch
+    def json_body
-      begin
+      @json_body ||= JSON.parse(body)
-        Net::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.scheme == 'https', read_timeout: TIMEOUT, open_timeout: TIMEOUT) do |http|
+    rescue JSON::ParserError
-          http.request_get(@uri.path) do |response|
+      raise FetchError, "Failed to parse JSON response"
-            raise "Unexpected response: #{response.code}" unless response.is_a?(Net::HTTPSuccess)
+    end

-            @body = String.new(capacity: MAX_SIZE)
+    def payload_timestamp = Time.now.httpdate
-            response.read_body do |chunk|
-              if @body.bytesize + chunk.bytesize > MAX_SIZE
-                raise FileTooLargeError, "Response would exceed #{MAX_SIZE} bytes"
-              end
-              @body << chunk
-            end
-            @body
-          end
-        end
-      end
-      @body
-    end

     def handles_body?
       true
-    rescue FileTooLargeError
+    rescue FileTooLargeError, Net::HTTPError, RuntimeError
       false
     end

-    def content
+    # Implement in subclass - this come from the `body`.
-      # Implement in subclass - this come from the `body`. Spliting content into code and metadata is the job of the SourceFile class
+    # Spliting content into payload and metadata is the job of the Package class
-      raise NotImplementedError
+    def content = body
-    end

-    def filename
     # Implement in subclass - this should return the filename extracted from the body - if it exists, but not from the metadata
-      raise NotImplementedError
+    def filename = File.basename @url
-    end

-    def source_file
+    def populate_metadata
-      @source_file ||= SourceFile.from_content(content)
+      @package.filename ||= filename
+      @package.url ||= @url
+      @package.payload_timestamp ||= payload_timestamp
+      @package.payload_checksum ||= Payload.checksum(content)
     end
   end

   class GithubGistProvider < DefaultProvider
     def self.handles_url?(url) = url.match?(%r{gist\.github\.com})

+    def content = json_body["files"].values.first["content"]
+
+    def filename = json_body["files"].values.first["filename"]
+
     def transform_url(url)
       gist_id = url[/gist\.github\.com\/[^\/]+\/([a-f0-9]+)/, 1]
       "https://api.github.com/gists/#{gist_id}"
     end

-    def content
+    def payload_timestamp
-      data = JSON.parse(body)
+      Time.parse(json_body["created_at"])
-      file = data["files"].values.first["content"]
+    rescue ArgumentError
-    end
+      nil

-    def filename
-      data = JSON.parse(body)
-      data["files"].values.first["filename"]
     end
   end

   class OpenGistProvider < DefaultProvider
-    def handles_url?(url)
+    def handles_url?(url) = :maybe
-      :maybe
+
+    def transform_url(url) = "#{url}.json"
+
+    def content = json_body.dig("files", 0, "content")
+
+    def filename = json_body.dig("files", 0, "filename")
+
+    def handles_body?
+      content && filename
+    rescue FileTooLargeError, Net::HTTPError, RuntimeError
+      false
+    end
+    # If we successfully fetch the body, and the body contains content and a filename, then we can handle the body
     end

-    def transform_url(url)
+  class FileProvider < DefaultProvider
-      "#{url}.json"
+    def self.handles_url?(url) = File.exist?(url)
-    end

-    def content
+    def transform_url(url) = Pathname(url)
-      data = JSON.parse(body)
-      @content = data.dig("files",0, "content")
-    end

-    def filename
+    def content = url.read
-      data = JSON.parse(body)
-      data.dig("files",0, "filename")
+    def filename = url.basename.to_s

+    def payload_timestamp
+      url.mtime.httpdate
+    rescue Errno::ENOENT
+      nil
     end
   end

   PROVIDERS = [
+    FileProvider,
     GithubGistProvider,
     OpenGistProvider,
     DefaultProvider
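The provider diff above settles the subclass surface: `handles_url?`, `transform_url`, `content`, `filename`, and optionally `payload_timestamp`, with fetching and `json_body` handled by `DefaultProvider`. A hedged sketch of what an additional provider might look like; the host name and JSON layout are made up, and such a class would still need to be added to the PROVIDERS list ahead of DefaultProvider:

```ruby
module Picopackage
  # Hypothetical provider for a made-up paste host, following the shape of
  # GithubGistProvider and OpenGistProvider above.
  class ExamplePasteProvider < DefaultProvider
    # Claim only URLs on the hypothetical host; other URLs fall through to later providers.
    def self.handles_url?(url) = url.match?(%r{paste\.example\.com})

    # Ask the host for its JSON representation of the paste.
    def transform_url(url) = "#{url}.json"

    # json_body (from DefaultProvider) parses the fetched body; pull payload and name out of it.
    def content = json_body.dig("paste", "content")

    def filename = json_body.dig("paste", "filename")

    def payload_timestamp
      Time.parse(json_body.dig("paste", "created_at")).httpdate
    rescue ArgumentError, TypeError
      nil
    end
  end
end
```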
@@ -1,11 +1,11 @@
-module Picop
+module Picopackage
   module Scanner
     def self.scan(directory, pattern: "**/*")
       Dir.glob(File.join(directory, pattern)).select do |file|
         next unless File.file?(file)
         content = File.read(file)
-        content.match?(SourceFile::METADATA_PATTERN)
+        content.match?(Package::METADATA_PATTERN)
-      end.map { |file| SourceFile.new(file) }
+      end.map { |file| Package.new(file) }
     end
   end
 end
@@ -1,109 +0,0 @@
-require "yaml"
-require "digest"
-
-module Picop
-  class SourceFile
-    attr_reader :content, :metadata, :code, :original_path
-
-    METADATA_PATTERN = /^\n*#\s*@PICOPACKAGE_START\n(.*?)^\s*#\s*@PICOPACKAGE_END\s*$/m
-
-    def self.from_file(file_path) = new(content: File.read(file_path), original_path: file_path)
-
-    def self.from_content(content, filename: nil)
-      instance = new(content: content)
-      if filename && !instance.metadata['filename']
-        metadata = instance.metadata.merge('filename' => filename)
-        instance.update_metadata(metadata) #TODO: FIX THIS
-      end
-      instance
-    end
-
-    def initialize(content:, original_path: nil)
-      @original_path = original_path
-
-      @content = content
-      @metadata = extract_metadata
-      @code = extract_code
-    end
-
-    def filename = @metadata['filename']
-
-    def version = @metadata['version'] || '0.0.0'
-
-    def checksum = "sha256:#{Digest::SHA256.hexdigest(code)}"
-
-    def inspect_metadata = puts JSON.pretty_generate(@metadata)
-
-    def save(destination = nil)
-      path = determine_save_path(destination)
-      File.write(path, content)
-      path
-    end
-
-    def extract_code = content.sub(METADATA_PATTERN, '')
-
-    def extract_metadata
-      return {} unless content =~ METADATA_PATTERN
-
-      yaml_content = $1.lines.map do |line|
-        line.sub(/^\s*#\s?/, '').rstrip
-      end.join("\n")
-
-      YAML.safe_load(yaml_content)
-    end
-
-    def update_metadata(metadata_hash)
-      @metadata = metadata_hash
-      @content = generate_content
-    end
-
-    def sign
-      hash = checksum
-      return puts "File already signed" if metadata['content_checksum'] == hash
-
-      new_metadata = metadata.merge('content_checksum' => hash)
-      update_metadata(new_metadata)
-      save
-    end
-
-    def verify
-      return false unless metadata.key? 'content_checksum'
-      checksum == metadata['content_checksum']
-    end
-
-    def modified? = !verify
-
-    private
-
-    def generate_content
-      metadata_block = generate_metadata
-      if content =~ METADATA_PATTERN
-        content.sub(METADATA_PATTERN, "\n#{metadata_block}")
-      else
-        [content.rstrip, "\n#{metadata_block}"].join("\n")
-      end
-    end
-
-    # This will need a comment style one day, to work with other languages
-    def generate_metadata
-      yaml_content = @metadata.to_yaml.strip
-      [
-        "# @PICOPACKAGE_START",
-        yaml_content.lines.map { |line| "# #{line}" }.join,
-        "# @PICOPACKAGE_END",
-        ""
-      ].join("\n")
-    end
-
-    def determine_save_path(destination)
-      if destination.nil?
-        @original_path || filename || raise("No filename available")
-      elsif File.directory?(destination)
-        File.join(destination, filename || File.basename(@original_path))
-      else
-        destination
-      end
-    end
-
-  end
-end
@@ -1,5 +1,5 @@
 # frozen_string_literal: true

-module Picop
+module Picopackage
-  VERSION = "0.1.0"
+  VERSION = "0.2.1"
 end
notes.md (new file, 25 lines)
@@ -0,0 +1,25 @@
+## Package Installation Flow
+
+1. **Fetch URL**
+   └─> Download content
+       └─> Build Package Instance (Metadata & Payload)
+           └─> Check local file status
+
+2. **Local File Check**
+   ├─> If file doesn't exist:
+   │   └─> Save file
+   │
+   └─> If file exists:
+       └─> Compare versions
+           ├─> If older/same:
+           │   └─> "Package already installed"
+           │
+           └─> If newer:
+               └─> Check local modifications
+                   ├─> If modified:
+                   │   └─> "Local modifications detected"
+                   │       └─> "Use 'update <file_path>'"
+                   │
+                   └─> If unmodified:
+                       └─> "Update available"
+                           └─> "Use 'update <file_path> -f' to force update"
@@ -8,17 +8,17 @@ Gem::Specification.new do |spec|
   spec.authors = ["Dan Milne"]
   spec.email = ["d@nmilne.com"]

-  spec.summary = "TODO: Write a short summary, because RubyGems requires one."
+  spec.summary = "Picopackage Tool."
-  spec.description = "TODO: Write a longer description or delete this line."
+  spec.description = "Picopackage Tool for managing Picopackages."
-  spec.homepage = "TODO: Put your gem's website or public repo URL here."
+  spec.homepage = "https://picopackage.org"
   spec.license = "MIT"
   spec.required_ruby_version = ">= 3.1.0"

-  spec.metadata["allowed_push_host"] = "TODO: Set to your gem server 'https://example.com'"
+  # spec.metadata["allowed_push_host"] = "TODO: Set to your gem server 'https://example.com'"

-  spec.metadata["homepage_uri"] = spec.homepage
+  # spec.metadata["homepage_uri"] = spec.homepage
-  spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
+  # spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
-  spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
+  # spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."

   # Specify which files should be added to the gem when it is released.
   # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
@@ -37,6 +37,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency "yaml", "~> 0.4"
   spec.add_dependency "digest"
   spec.add_development_dependency "debug"
+  spec.add_development_dependency "standard"

   # For more information and examples about making a new gem, check out our
   # guide at: https://bundler.io/guides/creating_gem.html
@@ -7,7 +7,13 @@ class TestPicopackage < Minitest::Test
     refute_nil ::Picopackage::VERSION
   end

-  def test_it_does_something_useful
+  def test_it_can_load_a_picopackage_file
-    assert false
+    sf = Picopackage::FileProvider.new(File.read("test/files/uniquify_array_packaged.rb"))
+    assert_equal "uniquify_array_packaged.rb", sf.metadata.filename
+  end
+
+  def test_it_can_create_a_picopackage_from_bare_file
+    sf = Picopackage::FileProvider.new(File.read("test/files/uniquify_array_bare.rb"))
+    assert_equal "uniquify_array_bare.rb", sf.metadata.filename
   end
 end