Compare commits
5 Commits
a066b73f2d
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e0cd0f0d7a | ||
|
|
f222c95e8a | ||
|
|
e0f7805599 | ||
|
|
ba4f8b9c2c | ||
|
|
4ea93f4c73 |
2
Gemfile
2
Gemfile
@@ -10,3 +10,5 @@ gem "rake", "~> 13.0"
|
||||
gem "minitest", "~> 5.16"
|
||||
|
||||
gem "rubocop", "~> 1.21"
|
||||
|
||||
gem "standard", "~> 1.44"
|
||||
|
||||
29
Gemfile.lock
29
Gemfile.lock
@@ -1,7 +1,7 @@
|
||||
PATH
|
||||
remote: .
|
||||
specs:
|
||||
picopackage (0.2.0)
|
||||
picopackage (0.2.1)
|
||||
digest
|
||||
open-uri (~> 0.5)
|
||||
yaml (~> 0.4)
|
||||
@@ -14,22 +14,27 @@ GEM
|
||||
debug (1.10.0)
|
||||
irb (~> 1.10)
|
||||
reline (>= 0.3.8)
|
||||
digest (3.1.1)
|
||||
digest (3.2.0)
|
||||
io-console (0.8.0)
|
||||
irb (1.14.3)
|
||||
irb (1.15.1)
|
||||
pp (>= 0.6.0)
|
||||
rdoc (>= 4.0.0)
|
||||
reline (>= 0.4.2)
|
||||
json (2.9.1)
|
||||
language_server-protocol (3.17.0.3)
|
||||
lint_roller (1.1.0)
|
||||
minitest (5.25.4)
|
||||
open-uri (0.5.0)
|
||||
stringio
|
||||
time
|
||||
uri
|
||||
parallel (1.26.3)
|
||||
parser (3.3.6.0)
|
||||
parser (3.3.7.0)
|
||||
ast (~> 2.4.1)
|
||||
racc
|
||||
pp (0.6.2)
|
||||
prettyprint
|
||||
prettyprint (0.2.0)
|
||||
psych (5.2.3)
|
||||
date
|
||||
stringio
|
||||
@@ -53,7 +58,22 @@ GEM
|
||||
unicode-display_width (>= 2.4.0, < 4.0)
|
||||
rubocop-ast (1.37.0)
|
||||
parser (>= 3.3.1.0)
|
||||
rubocop-performance (1.23.1)
|
||||
rubocop (>= 1.48.1, < 2.0)
|
||||
rubocop-ast (>= 1.31.1, < 2.0)
|
||||
ruby-progressbar (1.13.0)
|
||||
standard (1.44.0)
|
||||
language_server-protocol (~> 3.17.0.2)
|
||||
lint_roller (~> 1.0)
|
||||
rubocop (~> 1.70.0)
|
||||
standard-custom (~> 1.0.0)
|
||||
standard-performance (~> 1.6)
|
||||
standard-custom (1.0.2)
|
||||
lint_roller (~> 1.0)
|
||||
rubocop (~> 1.50)
|
||||
standard-performance (1.6.0)
|
||||
lint_roller (~> 1.1)
|
||||
rubocop-performance (~> 1.23.0)
|
||||
stringio (3.1.2)
|
||||
time (0.4.1)
|
||||
date
|
||||
@@ -73,6 +93,7 @@ DEPENDENCIES
|
||||
picopackage!
|
||||
rake (~> 13.0)
|
||||
rubocop (~> 1.21)
|
||||
standard (~> 1.44)
|
||||
|
||||
BUNDLED WITH
|
||||
2.6.2
|
||||
|
||||
18
README.md
18
README.md
@@ -1,28 +1,16 @@
|
||||
# Picopackage
|
||||
|
||||
TODO: Delete this and the text below, and describe your gem
|
||||
|
||||
Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/picopackge`. To experiment with that code, run `bin/console` for an interactive prompt.
|
||||
A command line tool for installing and managing [Picopackages](https://picopackage.org).
|
||||
|
||||
## Installation
|
||||
|
||||
TODO: Replace `UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG` with your gem name right after releasing it to RubyGems.org. Please do not do it earlier due to security reasons. Alternatively, replace this section with instructions to install your gem from git if you don't plan to release to RubyGems.org.
|
||||
|
||||
Install the gem and add to the application's Gemfile by executing:
|
||||
|
||||
```bash
|
||||
bundle add UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG
|
||||
```
|
||||
|
||||
If bundler is not being used to manage dependencies, install the gem by executing:
|
||||
|
||||
```bash
|
||||
gem install UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG
|
||||
gem install picopackage
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
TODO: Write usage instructions here
|
||||
`picopackage install <url|filepath>`
|
||||
|
||||
## Development
|
||||
|
||||
|
||||
7
Rakefile
7
Rakefile
@@ -5,8 +5,9 @@ require "minitest/test_task"
|
||||
|
||||
Minitest::TestTask.create
|
||||
|
||||
require "rubocop/rake_task"
|
||||
# require "rubocop/rake_task"
|
||||
# RuboCop::RakeTask.new
|
||||
|
||||
RuboCop::RakeTask.new
|
||||
require "standard/rake"
|
||||
|
||||
task default: %i[test rubocop]
|
||||
task default: %i[test standard]
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require "bundler/setup"
|
||||
require "picop"
|
||||
require "picopackage"
|
||||
|
||||
# You can add fixtures and/or initialization code here to make experimenting
|
||||
# with your gem easier. You can also use a different console, if you like.
|
||||
|
||||
6
exe/ppkg
6
exe/ppkg
@@ -1,15 +1,15 @@
|
||||
#!/usr/bin/env ruby
|
||||
|
||||
# Add lib directory to load path
|
||||
lib_path = File.expand_path('../lib', __dir__)
|
||||
lib_path = File.expand_path("../lib", __dir__)
|
||||
$LOAD_PATH.unshift(lib_path) unless $LOAD_PATH.include?(lib_path)
|
||||
|
||||
require 'picopackage'
|
||||
require "picopackage"
|
||||
|
||||
begin
|
||||
Picopackage::CLI.run(ARGV)
|
||||
rescue => e
|
||||
warn "Error: #{e.message}"
|
||||
warn e.backtrace if ENV['DEBUG']
|
||||
warn e.backtrace if ENV["DEBUG"]
|
||||
exit 1
|
||||
end
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require_relative "picopackage/version"
|
||||
require_relative "picopackage/http_fetcher"
|
||||
require_relative "picopackage/provider"
|
||||
require_relative "picopackage/source_file"
|
||||
require_relative "picopackage/scanner"
|
||||
require_relative "picopackage/fetch"
|
||||
require_relative "picopackage/provider"
|
||||
require_relative "picopackage/package"
|
||||
require_relative "picopackage/scanner"
|
||||
require_relative "picopackage/cli"
|
||||
|
||||
module Picopackage
|
||||
class Error < StandardError; end
|
||||
|
||||
class FileTooLargeError < StandardError; end
|
||||
|
||||
class FetchError < StandardError; end
|
||||
|
||||
class LocalModificationError < StandardError; end
|
||||
end
|
||||
|
||||
@@ -5,71 +5,71 @@ module Picopackage
|
||||
def self.run(argv = ARGV)
|
||||
command = argv.shift
|
||||
case command
|
||||
when 'scan'
|
||||
when "scan"
|
||||
options = {}
|
||||
OptionParser.new do |opts|
|
||||
opts.banner = "Usage: ppkg scan [options] DIRECTORY"
|
||||
# opts.on('-v', '--verbose', 'Run verbosely') { |v| options[:verbose] = v }
|
||||
end.parse!(argv)
|
||||
|
||||
dir = argv.first || '.'
|
||||
Picopackage::Scanner.scan(dir).each {|f| puts f.file_path }
|
||||
dir = argv.first || "."
|
||||
Picopackage::Scanner.scan(dir).each { |f| puts f.file_path }
|
||||
|
||||
when 'digest'
|
||||
when "init"
|
||||
OptionParser.new do |opts|
|
||||
opts.banner = "Usage: ppkg digest FILE"
|
||||
opts.banner = "Usage: ppkg init FILE"
|
||||
end.parse!(argv)
|
||||
|
||||
file = argv.first
|
||||
Picopackage::SourceFile.from_file(file).digest!
|
||||
Picopackage::Package.from_file(file).init_metadata
|
||||
|
||||
when 'checksum'
|
||||
when "checksum"
|
||||
OptionParser.new do |opts|
|
||||
opts.banner = "Usage: ppkg checksum FILE"
|
||||
end.parse!(argv)
|
||||
file = argv.first
|
||||
puts Picopackage::SourceFile.from_file(file).checksum
|
||||
puts Picopackage::Package.from_file(file).checksum
|
||||
|
||||
when 'verify'
|
||||
when "verify"
|
||||
OptionParser.new do |opts|
|
||||
opts.banner = "Usage: ppkg sign FILE"
|
||||
end.parse!(argv)
|
||||
|
||||
path = argv.first
|
||||
source = SourceFile.from_file(path)
|
||||
source = Package.from_file(path)
|
||||
|
||||
if source.metadata['content_checksum'].nil?
|
||||
if source.metadata["content_checksum"].nil?
|
||||
puts "⚠️ No checksum found in #{path}"
|
||||
puts "Run 'ppkg sign #{path}' to add one"
|
||||
exit 1
|
||||
end
|
||||
|
||||
unless source.verify
|
||||
unless source.verify_payload
|
||||
puts "❌ Checksum verification failed for #{path}"
|
||||
puts "Expected: #{source.metadata['content_checksum']}"
|
||||
puts "Expected: #{source.metadata["content_checksum"]}"
|
||||
puts "Got: #{source.checksum}"
|
||||
exit 1
|
||||
end
|
||||
|
||||
puts "✅ #{path} verified successfully"
|
||||
|
||||
when 'inspect'
|
||||
when "inspect"
|
||||
OptionParser.new do |opts|
|
||||
opts.banner = "Usage: ppkg inspect FILE|DIRECTORY"
|
||||
end.parse!(argv)
|
||||
|
||||
path = argv.first
|
||||
Picopackage::SourceFile.from_file(path).inspect
|
||||
Picopackage::Package.from_file(path).inspect_metadata
|
||||
|
||||
when 'fetch'
|
||||
options = { force: false }
|
||||
when "fetch"
|
||||
options = {force: false}
|
||||
OptionParser.new do |opts|
|
||||
opts.banner = "Usage: ppkg fetch [options] URI [PATH]"
|
||||
opts.on('-f', '--force', 'Force fetch') { |f| options[:force] = f }
|
||||
opts.on("-f", "--force", "Force fetch") { |f| options[:force] = f }
|
||||
end.parse!(argv)
|
||||
|
||||
url = argv.shift
|
||||
path = argv.shift || '.' # use '.' if no path provided
|
||||
path = argv.shift || "." # use '.' if no path provided
|
||||
|
||||
if url.nil?
|
||||
puts "Error: URI is required"
|
||||
@@ -85,17 +85,17 @@ module Picopackage
|
||||
exit 1
|
||||
end
|
||||
|
||||
when 'update'
|
||||
options = { force: false }
|
||||
when "update"
|
||||
options = {force: false}
|
||||
OptionParser.new do |opts|
|
||||
opts.banner = "Usage: ppkg update [options] FILE"
|
||||
opts.on('-f', '--force', 'Force update') { |f| options[:force] = f }
|
||||
opts.on("-f", "--force", "Force update") { |f| options[:force] = f }
|
||||
end.parse!(argv)
|
||||
|
||||
file = argv.first
|
||||
source_file = SourceFile.from_file(file)
|
||||
package = Package.from_file(file)
|
||||
begin
|
||||
Fetch.fetch(source_file.url, File.dirname(file), force: options[:force])
|
||||
Fetch.fetch(package.url, File.dirname(file), force: options[:force])
|
||||
rescue LocalModificationError => e
|
||||
puts "Error: #{e.message}"
|
||||
rescue => e
|
||||
@@ -105,7 +105,7 @@ module Picopackage
|
||||
|
||||
else
|
||||
puts "Unknown command: #{command}"
|
||||
puts "Available commands: scan, sign, inspect, update"
|
||||
puts "Available commands: fetch, update, scan, sign, inspect"
|
||||
exit 1
|
||||
end
|
||||
rescue OptionParser::InvalidOption => e
|
||||
@@ -113,8 +113,28 @@ module Picopackage
|
||||
exit 1
|
||||
rescue => e
|
||||
puts "Error: #{e.message}"
|
||||
puts e.backtrace if ENV['DEBUG']
|
||||
puts e.backtrace if ENV["DEBUG"]
|
||||
exit 1
|
||||
end
|
||||
|
||||
def self.determine_script_source
|
||||
# Get the full path of the currently executing script
|
||||
current_path = File.expand_path($0)
|
||||
|
||||
# Check if script is in GEM_PATH
|
||||
gem_paths = Gem.path.map { |p| File.expand_path(p) }
|
||||
|
||||
is_gem = gem_paths.any? { |path| current_path.start_with?(path) }
|
||||
|
||||
if is_gem
|
||||
# Running from gem installation
|
||||
gem_name = File.basename(File.dirname(File.dirname(current_path)))
|
||||
version = File.basename(File.dirname(current_path))
|
||||
{source: :gem, path: current_path, gem_name: gem_name, version: version}
|
||||
else
|
||||
# Running from local installation
|
||||
{source: :local, path: current_path}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,104 +1,167 @@
|
||||
require 'net/http'
|
||||
require 'fileutils'
|
||||
require 'tempfile'
|
||||
require 'json'
|
||||
require 'debug'
|
||||
require "net/http"
|
||||
require "fileutils"
|
||||
require "tempfile"
|
||||
require "json"
|
||||
require "debug"
|
||||
|
||||
module Picopackage
|
||||
class Fetch
|
||||
class Error < StandardError; end
|
||||
class HTTPError < Error; end
|
||||
class FileTooLargeError < Error; end
|
||||
class NotModifiedError < Error; end # Add this
|
||||
class TooManyRedirectsError < Error; end # Add this
|
||||
|
||||
MAX_REDIRECTS = 5 # This constant is used but not defined
|
||||
|
||||
def initialize(max_size: 1024 * 1024, timeout: 10)
|
||||
@max_size = max_size
|
||||
@timeout = timeout
|
||||
end
|
||||
|
||||
def fetch(uri)
|
||||
case uri.scheme
|
||||
when "http", "https" then fetch_http(uri)
|
||||
when "file" then fetch_file(uri)
|
||||
else
|
||||
raise Error, "Unsupported scheme: #{uri.scheme}"
|
||||
end
|
||||
end
|
||||
|
||||
def self.fetch(url, destination, force: false)
|
||||
raise ArgumentError, "Destination directory does not exist: #{destination}" unless Dir.exist?(destination)
|
||||
|
||||
provider = Provider.for(url)
|
||||
source_file = provider.source_file
|
||||
package = provider.package
|
||||
file_path = File.join(destination, package.filename)
|
||||
|
||||
file_path = File.join(destination, source_file.filename)
|
||||
local_package = File.exist?(file_path) ? FileProvider.new(file_path).package : nil
|
||||
|
||||
if File.exist?(file_path) && force
|
||||
source_file.save(destination)
|
||||
elsif File.exist?(file_path)
|
||||
local_source_file = SourceFile.from_file(file_path)
|
||||
status = Status.compare(local_source_file, source_file)
|
||||
resolver = Resolver.new(package, local_package, file_path, force: force).resolve
|
||||
|
||||
if force
|
||||
source_file.save(destination)
|
||||
elsif status.modified?
|
||||
raise LocalModificationError, "#{status.message}. Use -f or --force to overwrite local version"
|
||||
elsif status.outdated?
|
||||
puts "Updated from #{local_source_file.version} to #{source_file.version}"
|
||||
source_file.save(destination)
|
||||
elsif status.up_to_date?
|
||||
puts status.message
|
||||
end
|
||||
|
||||
else
|
||||
source_file.save(destination)
|
||||
if source_file.imported?
|
||||
source_file.digest!
|
||||
puts "Picopackage created for #{source_file.filename}"
|
||||
else
|
||||
puts "Picopackage downloaded to #{file_path}"
|
||||
end
|
||||
case resolver[:state]
|
||||
when :kept, :updated
|
||||
puts resolver[:message]
|
||||
when :conflict
|
||||
raise LocalModificationError, resolver[:message]
|
||||
end
|
||||
provider.source_file
|
||||
provider.package
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def fetch_http(uri, etag = nil)
|
||||
Net::HTTP.start(uri.host, uri.port, connection_options(uri)) do |http|
|
||||
request = Net::HTTP::Get.new(uri.request_uri)
|
||||
request["If-None-Match"] = etag if etag
|
||||
|
||||
response = http.request(request)
|
||||
handle_response(response, uri)
|
||||
end
|
||||
end
|
||||
|
||||
def fetch_file(uri)
|
||||
File.read(uri.path)
|
||||
end
|
||||
|
||||
def connection_options(uri)
|
||||
{
|
||||
use_ssl: uri.scheme == "https",
|
||||
read_timeout: @timeout,
|
||||
open_timeout: @timeout
|
||||
}
|
||||
end
|
||||
|
||||
def handle_response(response, uri)
|
||||
case response
|
||||
when Net::HTTPSuccess
|
||||
{
|
||||
body: read_body(response),
|
||||
etag: response["ETag"]
|
||||
}
|
||||
when Net::HTTPNotModified
|
||||
raise NotModifiedError.new("Resource not modified", etag: response["ETag"])
|
||||
when Net::HTTPRedirection
|
||||
handle_redirect(response, uri)
|
||||
else
|
||||
raise HTTPError, "HTTP #{response.code}: #{response.message}"
|
||||
end
|
||||
end
|
||||
|
||||
def handle_redirect(response, uri, redirect_count = 0)
|
||||
raise TooManyRedirectsError if redirect_count >= MAX_REDIRECTS
|
||||
location = response["location"]
|
||||
new_uri = URI(location)
|
||||
# Handle both relative paths and full URLs
|
||||
new_uri = uri.merge(location) if new_uri.relative?
|
||||
fetch(new_uri, redirect_count: redirect_count + 1)
|
||||
end
|
||||
|
||||
def read_body(response)
|
||||
buffer = String.new(capacity: @max_size)
|
||||
response.read_body do |chunk|
|
||||
raise FileTooLargeError, "Response would exceed #{@max_size} bytes" if buffer.bytesize + chunk.bytesize > @max_size
|
||||
buffer << chunk
|
||||
end
|
||||
buffer
|
||||
end
|
||||
end
|
||||
|
||||
class Status
|
||||
attr_reader :state, :local_version, :remote_version
|
||||
##
|
||||
# States:
|
||||
# - kept: local file was converted to a picopackage and kept
|
||||
# - updated: local file was updated with remote picopackage
|
||||
# - conflict: local and remote files differ - manually resolve or use -f to force
|
||||
class Resolver
|
||||
attr_reader :remote, :local, :local_path, :force
|
||||
def initialize(remote_package, local_package, local_path, force: false)
|
||||
@remote = remote_package
|
||||
@local = local_package
|
||||
@local_path = local_path
|
||||
@force = force
|
||||
@same_checksum = @remote.payload_checksum == @local&.payload_checksum
|
||||
end
|
||||
|
||||
def self.compare(local_source_file, remote_source_file)
|
||||
return new(:outdated) if local_source_file.metadata.nil? || remote_source_file.metadata.nil?
|
||||
STATES = %i[kept updated conflict].freeze
|
||||
|
||||
local_version = local_source_file.metadata["version"]
|
||||
remote_version = remote_source_file.metadata["version"]
|
||||
|
||||
if local_version == remote_version
|
||||
if local_source_file.modified?
|
||||
new(:modified, local_version:)
|
||||
def resolve
|
||||
validate_state_hash(
|
||||
if @force
|
||||
@remote.save(local_path)
|
||||
{state: :updated, message: "Force mode: overwrote local file with remote package"}
|
||||
elsif @local.nil?
|
||||
@remote.save(local_path)
|
||||
{state: :kept, message: "Saved Package as new file"}
|
||||
elsif @remote.payload_version != @local.payload_version
|
||||
{state: :conflict, message: "Version conflict. Local: #{@local.payload_version}, Remote: #{@remote.payload_version}"}
|
||||
elsif @remote.payload_timestamp_as_time > @local.payload_timestamp_as_time
|
||||
@remote.save(local_path)
|
||||
{state: :updated, message: "Updated to newer version"}
|
||||
elsif !@same_checksum
|
||||
handle_checksum_mismatch
|
||||
elsif @local.was_bare_file
|
||||
debugger
|
||||
@local.save(local_path)
|
||||
{state: :kept, message: "Packaged existing file as Picopackage"}
|
||||
else
|
||||
new(:up_to_date, local_version:)
|
||||
{state: :kept, message: "Local file is up to date"}
|
||||
end
|
||||
)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def validate_state_hash(hash)
|
||||
raise "Invalid state" unless STATES.include?(hash[:state])
|
||||
raise "Missing message" unless hash[:message].is_a?(String)
|
||||
hash
|
||||
end
|
||||
|
||||
def handle_checksum_mismatch
|
||||
if @force
|
||||
@remote.save(local_path) # In force mode, remote wins
|
||||
{state: :updated, message: "Overwrote local file with remote package"}
|
||||
else
|
||||
new(:outdated,
|
||||
local_version:,
|
||||
remote_version:,
|
||||
modified: local_source_file.modified?
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
def initialize(state, local_version: nil, remote_version: nil, modified: false)
|
||||
@state = state
|
||||
@local_version = local_version
|
||||
@remote_version = remote_version
|
||||
@modified = modified
|
||||
end
|
||||
|
||||
def modified?
|
||||
@modified || @state == :modified
|
||||
end
|
||||
|
||||
def up_to_date?
|
||||
@state == :up_to_date
|
||||
end
|
||||
|
||||
def outdated?
|
||||
@state == :outdated
|
||||
end
|
||||
|
||||
def message
|
||||
case state
|
||||
when :up_to_date
|
||||
"Picopackage is up to date"
|
||||
when :outdated
|
||||
if modified?
|
||||
"Local Picopackage (v#{local_version}) has modifications but remote version (v#{remote_version}) is available"
|
||||
else
|
||||
"Local Picopackage (v#{local_version}) is outdated. Remote version: v#{remote_version}"
|
||||
end
|
||||
when :modified
|
||||
"Local Picopackage has been modified from original version (v#{local_version})"
|
||||
{state: :conflict, message: "Files differ. Use --force to convert both to packages"}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
128
lib/picopackage/package.rb
Normal file
128
lib/picopackage/package.rb
Normal file
@@ -0,0 +1,128 @@
|
||||
require "yaml"
|
||||
require "digest"
|
||||
require "forwardable"
|
||||
|
||||
module Picopackage
|
||||
METADATA_PATTERN = /^\n*#\s*@PICOPACKAGE_START\n(.*?)^\s*#\s*@PICOPACKAGE_END\s*$/m
|
||||
|
||||
class Metadata < Struct.new(:url, :filename, :payload_version, :payload_timestamp, :payload_checksum, :etag, keyword_init: true)
|
||||
# the #from_file method will create a new instance of Metadata from a file path, rather than read a package's metadata
|
||||
def self.from_file(file_path, content: nil)
|
||||
new(content: File.read(file_path))
|
||||
end
|
||||
|
||||
def self.from_url_response(url, response)
|
||||
end
|
||||
|
||||
def self.from_content(content)
|
||||
return new unless content =~ METADATA_PATTERN
|
||||
|
||||
yaml_content = $1.each_line.map { |line| line.sub(/^\s*#\s?/, "").rstrip }.join("\n")
|
||||
|
||||
# Load and transform in one chain
|
||||
@metadata = new(**YAML.safe_load(yaml_content)
|
||||
.slice(*Metadata.members.map(&:to_s))
|
||||
.transform_keys(&:to_sym))
|
||||
rescue
|
||||
new # Return empty hash on any YAML/transformation errors
|
||||
end
|
||||
|
||||
def empty? = to_h.values.all?(&:nil?)
|
||||
end
|
||||
|
||||
class Payload
|
||||
def self.from_content(content) = content.sub(METADATA_PATTERN, "")
|
||||
|
||||
def self.normalize(payload) = payload.rstrip + "\n\n"
|
||||
|
||||
def self.normalized_from_content(content) = Payload.from_content(content).then { Payload.normalize(_1) }
|
||||
|
||||
def self.from_file(file_path) = normalized_from_content(File.read(file_path))
|
||||
|
||||
def self.checksum(payload) = "sha256:#{Digest::SHA256.hexdigest(payload)}"
|
||||
|
||||
def self.checksum_from_content(content) = checksum(from_content(content))
|
||||
end
|
||||
|
||||
class Package
|
||||
extend Forwardable
|
||||
attr_reader :content, :payload, :metadata, :was_bare_file
|
||||
|
||||
def_delegators :@metadata,
|
||||
:url, :url=,
|
||||
:filename, :filename=,
|
||||
:payload_version, :payload_version=,
|
||||
:payload_timestamp, :payload_timestamp=,
|
||||
:payload_checksum, :payload_checksum=
|
||||
|
||||
def self.from_file(file_path)
|
||||
if File.exist?(file_path)
|
||||
new(content: File.read(file_path))
|
||||
end
|
||||
end
|
||||
|
||||
def initialize(content:)
|
||||
@content = content
|
||||
@payload = Payload.normalized_from_content(@content)
|
||||
@metadata = Metadata.from_content(@content)
|
||||
|
||||
if is_bare_file?
|
||||
@was_bare_file = true
|
||||
init_metadata
|
||||
else
|
||||
@was_bare_file = false
|
||||
end
|
||||
end
|
||||
|
||||
def is_bare_file? = @metadata.empty?
|
||||
|
||||
def init_metadata
|
||||
@metadata.url ||= url
|
||||
@metadata.filename ||= filename
|
||||
@metadata.payload_checksum ||= Payload.checksum_from_content(content)
|
||||
@metadata.payload_timestamp ||= payload_timestamp
|
||||
end
|
||||
|
||||
def save(path, filename = nil)
|
||||
path = File.join(path, filename || @metadata.filename) if File.directory?(path)
|
||||
|
||||
File.write(path, generate_package)
|
||||
end
|
||||
|
||||
def verify_payload
|
||||
return false if metadata.payload_checksum.nil? || metadata.payload_checksum&.empty?
|
||||
Payload.checksum(payload) == metadata.payload_checksum
|
||||
end
|
||||
|
||||
def payload_timestamp_as_time
|
||||
@metadata&.payload_timestamp ? Time.parse(@metadata.payload_timestamp) : nil
|
||||
end
|
||||
|
||||
def modified? = !verify_payload
|
||||
|
||||
def inspect_metadata = puts JSON.pretty_generate(@metadata.to_h)
|
||||
|
||||
private
|
||||
|
||||
def generate_package
|
||||
@metadata.url = url.to_s
|
||||
metadata_block = generate_metadata
|
||||
if METADATA_PATTERN.match?(content)
|
||||
content.sub(METADATA_PATTERN, "\n#{metadata_block}")
|
||||
else
|
||||
[content.rstrip, "\n#{metadata_block}"].join("\n")
|
||||
end
|
||||
end
|
||||
|
||||
# This will need a comment style one day, to work with other languages
|
||||
def generate_metadata
|
||||
yaml_content = @metadata.to_h.transform_keys(&:to_s).to_yaml.strip
|
||||
[
|
||||
"# @PICOPACKAGE_START",
|
||||
yaml_content.lines.map { |line| "# #{line}" }.join,
|
||||
"# @PICOPACKAGE_END",
|
||||
""
|
||||
].join("\n")
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,3 +1,6 @@
|
||||
require "time"
|
||||
require "pathname"
|
||||
|
||||
module Picopackage
|
||||
class Provider
|
||||
def self.for(url)
|
||||
@@ -17,94 +20,119 @@ module Picopackage
|
||||
end
|
||||
|
||||
# Base class for fetching content from a URL
|
||||
# The variable `body` will contain the content retrieved from the URL
|
||||
# The variable `content` will contain both and code + metadata - this would be writen to a file.
|
||||
# The variable `code` will contain the code extracted from `content`
|
||||
# The variable `metadata` will contain the metadata extracted from `content`
|
||||
# The variable `body` will contain the package_data retrieved from the URL
|
||||
# The variable `package_data` will contain both and payload + metadata - this would be writen to a file.
|
||||
# The variable `payload` will contain the payload extracted from `package_data`
|
||||
# The variable `metadata` will contain the metadata extracted from `package_data`
|
||||
|
||||
# Job of the Provider class is to fetch the body from the URL, and then extract the content and the filename from the body
|
||||
# The SourceFile class will then take the body and split it into code and metadata
|
||||
# Job of the Provider class is to fetch the body from the URL, and then extract the package_data
|
||||
# and the filename from the body. The Package class will then take the body and split it into payload and metadata
|
||||
|
||||
class DefaultProvider
|
||||
MAX_SIZE = 1024 * 1024
|
||||
TIMEOUT = 10
|
||||
attr_reader :url, :source_file
|
||||
attr_reader :url, :package
|
||||
|
||||
def self.handles_url?(url) = :maybe
|
||||
|
||||
def initialize(url)
|
||||
def initialize(url, fetcher: Fetch.new(max_size: MAX_SIZE, timeout: TIMEOUT))
|
||||
@url = transform_url(url)
|
||||
@uri = URI(@url)
|
||||
@body = nil
|
||||
@content = nil
|
||||
@fetcher = fetcher
|
||||
@package = Package.new(content: content)
|
||||
populate_metadata
|
||||
end
|
||||
|
||||
def body = @body ||= fetch
|
||||
def json_body = @json_body ||= JSON.parse(body)
|
||||
def transform_url(url) = url
|
||||
def transform_url(url) = URI(url)
|
||||
|
||||
def fetch
|
||||
begin
|
||||
Net::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.scheme == 'https', read_timeout: TIMEOUT, open_timeout: TIMEOUT) do |http|
|
||||
http.request_get(@uri.path) do |response|
|
||||
raise "Unexpected response: #{response.code}" unless response.is_a?(Net::HTTPSuccess)
|
||||
|
||||
@body = String.new(capacity: MAX_SIZE)
|
||||
response.read_body do |chunk|
|
||||
if @body.bytesize + chunk.bytesize > MAX_SIZE
|
||||
raise FileTooLargeError, "Response would exceed #{MAX_SIZE} bytes"
|
||||
end
|
||||
@body << chunk
|
||||
end
|
||||
@body
|
||||
end
|
||||
end
|
||||
end
|
||||
@body
|
||||
def body
|
||||
@body ||= @fetcher.fetch(@url)
|
||||
rescue Fetch::Error => e
|
||||
raise FetchError, e.message
|
||||
end
|
||||
|
||||
def json_body
|
||||
@json_body ||= JSON.parse(body)
|
||||
rescue JSON::ParserError
|
||||
raise FetchError, "Failed to parse JSON response"
|
||||
end
|
||||
|
||||
def payload_timestamp = Time.now.httpdate
|
||||
|
||||
def handles_body?
|
||||
true
|
||||
rescue FileTooLargeError, Net::HTTPError, RuntimeError => e
|
||||
rescue FileTooLargeError, Net::HTTPError, RuntimeError
|
||||
false
|
||||
end
|
||||
|
||||
# Implement in subclass - this come from the `body`.
|
||||
# Spliting content into code and metadata is the job of the SourceFile class
|
||||
# Spliting content into payload and metadata is the job of the Package class
|
||||
def content = body
|
||||
|
||||
# Implement in subclass - this should return the filename extracted from the body - if it exists, but not from the metadata
|
||||
def filename = File.basename @url
|
||||
|
||||
def source_file
|
||||
@source_file ||= SourceFile.from_content(content, metadata: {'filename' => filename, 'url' => url, 'version' => '0.0.1'})
|
||||
def populate_metadata
|
||||
@package.filename ||= filename
|
||||
@package.url ||= @url
|
||||
@package.payload_timestamp ||= payload_timestamp
|
||||
@package.payload_checksum ||= Payload.checksum(content)
|
||||
end
|
||||
end
|
||||
|
||||
class GithubGistProvider < DefaultProvider
|
||||
def self.handles_url?(url) = url.match?(%r{gist\.github\.com})
|
||||
|
||||
def content = json_body["files"].values.first["content"]
|
||||
|
||||
def filename = json_body["files"].values.first["filename"]
|
||||
|
||||
def transform_url(url)
|
||||
gist_id = url[/gist\.github\.com\/[^\/]+\/([a-f0-9]+)/, 1]
|
||||
"https://api.github.com/gists/#{gist_id}"
|
||||
end
|
||||
|
||||
def payload_timestamp
|
||||
Time.parse(json_body["created_at"])
|
||||
rescue ArgumentError
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
class OpenGistProvider < DefaultProvider
|
||||
def handles_url?(url) = :maybe
|
||||
|
||||
def transform_url(url) = "#{url}.json"
|
||||
def content = json_body.dig("files",0, "content")
|
||||
def filename = json_body.dig("files",0, "filename")
|
||||
|
||||
def content = json_body.dig("files", 0, "content")
|
||||
|
||||
def filename = json_body.dig("files", 0, "filename")
|
||||
|
||||
def handles_body?
|
||||
content && filename
|
||||
rescue FileTooLargeError, Net::HTTPError, RuntimeError => e
|
||||
rescue FileTooLargeError, Net::HTTPError, RuntimeError
|
||||
false
|
||||
end
|
||||
# If we successfully fetch the body, and the body contains content and a filename, then we can handle the body
|
||||
end
|
||||
|
||||
class FileProvider < DefaultProvider
|
||||
def self.handles_url?(url) = File.exist?(url)
|
||||
|
||||
def transform_url(url) = Pathname(url)
|
||||
|
||||
def content = url.read
|
||||
|
||||
def filename = url.basename.to_s
|
||||
|
||||
def payload_timestamp
|
||||
url.mtime.httpdate
|
||||
rescue Errno::ENOENT
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
PROVIDERS = [
|
||||
FileProvider,
|
||||
GithubGistProvider,
|
||||
OpenGistProvider,
|
||||
DefaultProvider
|
||||
|
||||
@@ -4,8 +4,8 @@ module Picopackage
|
||||
Dir.glob(File.join(directory, pattern)).select do |file|
|
||||
next unless File.file?(file)
|
||||
content = File.read(file)
|
||||
content.match?(SourceFile::METADATA_PATTERN)
|
||||
end.map { |file| SourceFile.new(file) }
|
||||
content.match?(Package::METADATA_PATTERN)
|
||||
end.map { |file| Package.new(file) }
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,120 +0,0 @@
|
||||
require "yaml"
|
||||
require "digest"
|
||||
|
||||
module Picopackage
|
||||
class SourceFile
|
||||
attr_reader :content, :metadata, :code, :original_path
|
||||
|
||||
METADATA_PATTERN = /^\n*#\s*@PICOPACKAGE_START\n(.*?)^\s*#\s*@PICOPACKAGE_END\s*$/m
|
||||
|
||||
def self.from_file(file_path) = new(content: File.read(file_path), original_path: file_path)
|
||||
|
||||
def self.from_content(content, metadata: {})
|
||||
instance = new(content: content)
|
||||
instance.imported! if instance.metadata.empty?
|
||||
|
||||
updated_metadata = metadata.merge(instance.metadata)
|
||||
|
||||
## For new Picopackages, we should add metadata and checksum
|
||||
instance.update_metadata(updated_metadata)
|
||||
|
||||
instance
|
||||
end
|
||||
|
||||
def initialize(content:, original_path: nil)
|
||||
@original_path = original_path
|
||||
|
||||
@content = content
|
||||
@metadata = extract_metadata
|
||||
@code = extract_code
|
||||
end
|
||||
|
||||
def imported! = @imported = true
|
||||
|
||||
def imported? = @imported ||= false
|
||||
|
||||
def content = @content
|
||||
|
||||
def url = @metadata['url']
|
||||
|
||||
def filename = @metadata['filename']
|
||||
|
||||
def version = @metadata['version'] || '0.0.1'
|
||||
|
||||
def checksum = "sha256:#{Digest::SHA256.hexdigest(code)}"
|
||||
|
||||
def inspect_metadata = puts JSON.pretty_generate(@metadata)
|
||||
|
||||
def save(destination = nil)
|
||||
path = determine_save_path(destination)
|
||||
File.write(path, content)
|
||||
path
|
||||
end
|
||||
|
||||
def extract_code = content.sub(METADATA_PATTERN, '')
|
||||
|
||||
def extract_metadata
|
||||
return {} unless content =~ METADATA_PATTERN
|
||||
|
||||
yaml_content = $1.lines.map do |line|
|
||||
line.sub(/^\s*#\s?/, '').rstrip
|
||||
end.join("\n")
|
||||
|
||||
YAML.safe_load(yaml_content)
|
||||
end
|
||||
|
||||
def update_metadata(metadata_hash)
|
||||
@metadata = metadata_hash
|
||||
@content = generate_content
|
||||
end
|
||||
|
||||
def digest!
|
||||
hash = checksum
|
||||
return puts "File already has a checksum" if metadata['content_checksum'] == hash
|
||||
|
||||
new_metadata = metadata.merge('content_checksum' => hash)
|
||||
update_metadata(new_metadata)
|
||||
save
|
||||
end
|
||||
|
||||
def verify
|
||||
return false unless metadata.key? 'content_checksum'
|
||||
checksum == metadata['content_checksum']
|
||||
end
|
||||
|
||||
def modified? = !verify
|
||||
|
||||
private
|
||||
|
||||
def generate_content
|
||||
metadata_block = generate_metadata
|
||||
if content =~ METADATA_PATTERN
|
||||
content.sub(METADATA_PATTERN, "\n#{metadata_block}")
|
||||
else
|
||||
[content.rstrip, "\n#{metadata_block}"].join("\n")
|
||||
end
|
||||
end
|
||||
|
||||
# This will need a comment style one day, to work with other languages
|
||||
def generate_metadata
|
||||
yaml_content = @metadata.to_yaml.strip
|
||||
[
|
||||
"# @PICOPACKAGE_START",
|
||||
yaml_content.lines.map { |line| "# #{line}" }.join,
|
||||
"# @PICOPACKAGE_END",
|
||||
""
|
||||
].join("\n")
|
||||
end
|
||||
|
||||
def determine_save_path(destination)
|
||||
if destination.nil?
|
||||
@original_path || filename || raise("No filename available")
|
||||
elsif File.directory?(destination)
|
||||
File.join(destination, filename || File.basename(@original_path))
|
||||
else
|
||||
destination
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
@@ -1,5 +1,5 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module Picopackage
  # Gem version string; 0.2.1 is the post-bump value from this change set.
  # (The diff dump fused the old and new assignment lines; keeping both
  # would reassign the constant and emit a warning.)
  VERSION = "0.2.1"
end
|
||||
|
||||
25
notes.md
Normal file
25
notes.md
Normal file
@@ -0,0 +1,25 @@
|
||||
## Package Installation Flow
|
||||
|
||||
1. **Fetch URL**
|
||||
└─> Download content
|
||||
└─> Build Package Instance (Metadata & Payload)
|
||||
└─> Check local file status
|
||||
|
||||
2. **Local File Check**
|
||||
├─> If file doesn't exist:
|
||||
│ └─> Save file
|
||||
│
|
||||
└─> If file exists:
|
||||
└─> Compare versions
|
||||
├─> If older/same:
|
||||
│ └─> "Package already installed"
|
||||
│
|
||||
└─> If newer:
|
||||
└─> Check local modifications
|
||||
├─> If modified:
|
||||
│ └─> "Local modifications detected"
|
||||
│ └─> "Use 'update <file_path>'"
|
||||
│
|
||||
└─> If unmodified:
|
||||
└─> "Update available"
|
||||
└─> "Use 'update <file_path> -f' to force update"
|
||||
@@ -14,11 +14,11 @@ Gem::Specification.new do |spec|
|
||||
spec.license = "MIT"
|
||||
spec.required_ruby_version = ">= 3.1.0"
|
||||
|
||||
#spec.metadata["allowed_push_host"] = "TODO: Set to your gem server 'https://example.com'"
|
||||
# spec.metadata["allowed_push_host"] = "TODO: Set to your gem server 'https://example.com'"
|
||||
|
||||
#spec.metadata["homepage_uri"] = spec.homepage
|
||||
#spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
|
||||
#spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
|
||||
# spec.metadata["homepage_uri"] = spec.homepage
|
||||
# spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
|
||||
# spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
|
||||
|
||||
# Specify which files should be added to the gem when it is released.
|
||||
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
|
||||
@@ -37,6 +37,7 @@ Gem::Specification.new do |spec|
|
||||
spec.add_dependency "yaml", "~> 0.4"
|
||||
spec.add_dependency "digest"
|
||||
spec.add_development_dependency "debug"
|
||||
spec.add_development_dependency "standard"
|
||||
|
||||
# For more information and examples about making a new gem, check out our
|
||||
# guide at: https://bundler.io/guides/creating_gem.html
|
||||
|
||||
@@ -7,7 +7,13 @@ class TestPicopackage < Minitest::Test
|
||||
refute_nil ::Picopackage::VERSION
|
||||
end
|
||||
|
||||
def test_it_does_something_useful
|
||||
assert false
|
||||
# Loads a fixture that already carries an embedded @PICOPACKAGE metadata
# block and checks the filename is read back out of that metadata.
def test_it_can_load_a_picopackage_file
  sf = Picopackage::FileProvider.new(File.read("test/files/uniquify_array_packaged.rb"))
  # NOTE(review): SourceFile#metadata returns a plain Hash (no #filename
  # method) — confirm FileProvider wraps metadata in an object that
  # responds to #filename, or this should be sf.metadata['filename'].
  assert_equal "uniquify_array_packaged.rb", sf.metadata.filename
end
|
||||
|
||||
# Builds a picopackage from a bare file (no metadata block yet) and expects
# a filename to be present in the resulting metadata.
def test_it_can_create_a_picopackage_from_bare_file
  sf = Picopackage::FileProvider.new(File.read("test/files/uniquify_array_bare.rb"))
  # NOTE(review): a bare file has no embedded metadata, so presumably
  # FileProvider derives the filename itself — verify against its
  # implementation (the constructor shown here receives only the content,
  # not the path).
  assert_equal "uniquify_array_bare.rb", sf.metadata.filename
end
|
||||
end
|
||||
|
||||
Reference in New Issue
Block a user