-#
-#===============================================================================
-#
-# FILE: Admin.pm
-#
-# DESCRIPTION: Administrative functions for DBIx::Class Schemata
-#
-# FILES: ---
-# BUGS: ---
-# NOTES: ---
-# AUTHOR: Gordon Irving (), <Gordon.irving@sophos.com>
-# VERSION: 1.0
-# CREATED: 28/11/09 12:27:15 GMT
-# REVISION: ---
-#===============================================================================
-
package DBIx::Class::Admin;
+# check deps
+BEGIN {
+ use Carp::Clan qw/^DBIx::Class/;
+ use DBIx::Class;
+ croak('The following modules are required for DBIx::Class::Admin ' . DBIx::Class::Optional::Dependencies->req_missing_for ('admin') )
+ unless DBIx::Class::Optional::Dependencies->req_ok_for ('admin');
+}
+
use Moose;
-use MooseX::Types
- -declare => [qw( DBICConnectInfo )];
-use MooseX::Types::Moose qw/Int HashRef ArrayRef Str Any/;
+use MooseX::Types::Moose qw/Int Str Any Bool/;
+use DBIx::Class::Admin::Types qw/DBICConnectInfo DBICHashRef/;
use MooseX::Types::JSON qw(JSON);
use MooseX::Types::Path::Class qw(Dir File);
use Try::Tiny;
-use parent 'Class::C3::Componentised';
-
-use Data::Dumper;
-use JSON::Any;
-
-
-coerce ArrayRef,
- from JSON,
- via { _json_to_data ($_) };
-
-coerce HashRef,
- from JSON,
- via { _json_to_data($_) };
-
-subtype DBICConnectInfo,
- as ArrayRef;
-
-coerce DBICConnectInfo,
- from JSON,
- via { return _json_to_data($_) } ;
-
-coerce DBICConnectInfo,
- from Str,
- via { return _json_to_data($_) };
-
-coerce DBICConnectInfo,
- from HashRef,
- via { [ $_->{dsn}, $_->{user}, $_->{password} ] };
-#
-# ['lib|I:s' => 'Additonal library path to search in'],
-# ['schema|s:s' => 'The class of the schema to load', { required => 1 } ],
-# ['config-stanza|S:s' => 'Where in the config to find the connection_info, supply in form MyApp::Model::DB',],
-# ['config|C:s' => 'Supply the config file for parsing by Config::Any', { depends => 'config_stanza'} ],
-# ['connect-info|n:s%' => ' supply the connect info as additonal options ie -I dsn=<dsn> user=<user> password=<pass> '],
-# ['sql-dir|q:s' => 'The directory where sql diffs will be created'],
-# ['sql-type|t:s' => 'The RDBMs falvour you wish to use'],
-# ['version|v:i' => 'Supply a version install'],
-# ['preversion|p:s' => 'The previous version to diff against',],
-#
-# 'schema=s' => \my $schema_class,
-# 'class=s' => \my $resultset_class,
-# 'connect=s' => \my $connect,
-# 'op=s' => \my $op,
-# 'set=s' => \my $set,
-# 'where=s' => \my $where,
-# 'attrs=s' => \my $attrs,
-# 'format=s' => \my $format,
-# 'force' => \my $force,
-# 'trace' => \my $trace,
-# 'quiet' => \my $quiet,
-# 'help' => \my $help,
-# 'tlibs' => \my $t_libs,
-#=cut
+use JSON::Any qw(DWIW XS JSON);
+use namespace::autoclean;
=head1 NAME
=head1 SYNOPSIS
- use DBIx::Class::Admin;
+ $ dbicadmin --help
- # ddl manipulation
- my $admin = DBIx::Class::Admin->new(
- schema_class=> 'MY::Schema',
- sql_dir=> $sql_dir,
- connect_info => { dsn => $dsn, user => $user, password => $pass },
- );
+ $ dbicadmin --schema=MyApp::Schema \
+ --connect='["dbi:SQLite:my.db", "", ""]' \
+ --deploy
- # create SQLite sql
- $admin->create('SQLite');
+ $ dbicadmin --schema=MyApp::Schema --class=Employee \
+ --connect='["dbi:SQLite:my.db", "", ""]' \
+ --op=update --set='{ "name": "New_Employee" }'
- # create SQL diff for an upgrade
- $admin->create('SQLite', {} , "1.0");
+ use DBIx::Class::Admin;
- # upgrade a database
- $admin->upgrade();
+ # ddl manipulation
+ my $admin = DBIx::Class::Admin->new(
+ schema_class=> 'MY::Schema',
+ sql_dir=> $sql_dir,
+ connect_info => { dsn => $dsn, user => $user, password => $pass },
+ );
- # install a version for an unversioned schema
- $admin->install("3.0");
+ # create SQLite sql
+ $admin->create('SQLite');
-=head1 Attributes
+ # create SQL diff for an upgrade
+ $admin->create('SQLite', {} , "1.0");
-=head2 lib
+ # upgrade a database
+ $admin->upgrade();
-add a library search path
-=cut
-has lib => (
- is => 'ro',
- isa => Dir,
- coerce => 1,
- trigger => \&_set_inc,
-);
+ # install a version for an unversioned schema
+ $admin->install("3.0");
-sub _set_inc {
- my ($self, $lib) = @_;
- push @INC, $lib->stringify;
-}
+=head1 REQUIREMENTS
+
+The Admin interface has additional requirements not currently part of
+L<DBIx::Class>. See L<DBIx::Class::Optional::Dependencies> for more details.
+
+=head1 ATTRIBUTES
=head2 schema_class
the class of the schema to load
+
=cut
+
has 'schema_class' => (
- is => 'ro',
- isa => 'Str',
- coerce => 1,
+ is => 'ro',
+ isa => Str,
);
+
=head2 schema
A pre-connected schema object can be provided for manipulation
+
=cut
+
has 'schema' => (
- is => 'ro',
- isa => 'DBIx::Class::Schema',
- lazy_build => 1,
+ is => 'ro',
+ isa => 'DBIx::Class::Schema',
+ lazy_build => 1,
);
sub _build_schema {
- my ($self) = @_;
- $self->ensure_class_loaded($self->schema_class);
+ my ($self) = @_;
- $self->connect_info->[3]->{ignore_version} =1;
- return $self->schema_class->connect(@{$self->connect_info()} ); # , $self->connect_info->[3], { ignore_version => 1} );
+ require Class::MOP;
+ Class::MOP::load_class($self->schema_class);
+ $self->connect_info->[3]{ignore_version} = 1;
+ return $self->schema_class->connect(@{$self->connect_info});
}
=head2 resultset
a resultset from the schema to operate on
+
=cut
+
has 'resultset' => (
- is => 'rw',
- isa => Str,
+ is => 'rw',
+ isa => Str,
);
+
=head2 where
a hash ref or json string to be used for identifying data to manipulate
+
=cut
has 'where' => (
- is => 'rw',
- isa => HashRef,
- coerce => 1,
+ is => 'rw',
+ isa => DBICHashRef,
+ coerce => 1,
);
+
=head2 set
+
a hash ref or json string to be used for inserting or updating data
+
=cut
has 'set' => (
- is => 'rw',
- isa => HashRef,
- coerce => 1,
+ is => 'rw',
+ isa => DBICHashRef,
+ coerce => 1,
);
+
=head2 attrs
+
a hash ref or json string to be used for passing additonal info to the ->search call
+
=cut
+
has 'attrs' => (
- is => 'rw',
- isa => HashRef,
- coerce => 1,
+ is => 'rw',
+ isa => DBICHashRef,
+ coerce => 1,
);
+
+
=head2 connect_info
connect_info the arguments to provide to the connect call of the schema_class
-=cut
+=cut
has 'connect_info' => (
- is => 'ro',
- isa => DBICConnectInfo,
- lazy_build => 1,
- coerce => 1,
+ is => 'ro',
+ isa => DBICConnectInfo,
+ lazy_build => 1,
+ coerce => 1,
);
sub _build_connect_info {
- my ($self) = @_;
- return $self->_find_stanza($self->config, $self->config_stanza);
+ my ($self) = @_;
+ return $self->_find_stanza($self->config, $self->config_stanza);
}
+
=head2 config_file
config_file provide a config_file to read connect_info from, if this is provided
config_stanze should also be provided to locate where the connect_info is in the config
The config file should be in a format readable by Config::General
+
=cut
+
has config_file => (
- is => 'ro',
- isa => File,
- coerce => 1,
+ is => 'ro',
+ isa => File,
+ coerce => 1,
);
+
=head2 config_stanza
config_stanza for use with config_file should be a '::' deliminated 'path' to the connection information
designed for use with catalyst config files
+
=cut
+
has 'config_stanza' => (
- is => 'ro',
- isa => 'Str',
+ is => 'ro',
+ isa => Str,
);
+
=head2 config
-Instead of loading from a file the configuration can be provided directly as a hash ref. Please note
+Instead of loading from a file the configuration can be provided directly as a hash ref. Please note
config_stanza will still be required.
+
=cut
+
has config => (
- is => 'ro',
- isa => HashRef,
- lazy_build => 1,
+ is => 'ro',
+ isa => DBICHashRef,
+ lazy_build => 1,
);
sub _build_config {
- my ($self) = @_;
- try { require 'Config::Any'; } catch { die "Config::Any is required to parse the config file"; };
+ my ($self) = @_;
- my $cfg = Config::Any->load_files ( {files => [$self->config_file], use_ext =>1, flatten_to_hash=>1});
+ try { require Config::Any }
+ catch { die ("Config::Any is required to parse the config file.\n") };
- # just grab the config from the config file
- $cfg = $cfg->{$self->config_file};
- return $cfg;
+ my $cfg = Config::Any->load_files ( {files => [$self->config_file], use_ext =>1, flatten_to_hash=>1});
+
+ # just grab the config from the config file
+ $cfg = $cfg->{$self->config_file};
+ return $cfg;
}
+
=head2 sql_dir
The location where sql ddl files should be created or found for an upgrade.
+
=cut
+
has 'sql_dir' => (
- is => 'ro',
- isa => Dir,
- coerce => 1,
+ is => 'ro',
+ isa => Dir,
+ coerce => 1,
+);
+
+
+=head2 sql_type
+
+The type of sql dialect to use for creating sql files from schema
+
+=cut
+
+has 'sql_type' => (
+ is => 'ro',
+ isa => Str,
);
=head2 version
Used for install, the version which will be 'installed' in the schema
+
=cut
+
has version => (
- is => 'rw',
- isa => 'Str',
+ is => 'rw',
+ isa => Str,
);
+
=head2 preversion
Previouse version of the schema to create an upgrade diff for, the full sql for that version of the sql must be in the sql_dir
+
=cut
+
has preversion => (
- is => 'rw',
- isa => 'Str',
+ is => 'rw',
+ isa => Str,
);
+
=head2 force
Try and force certain operations.
+
=cut
+
has force => (
- is => 'rw',
- isa => 'Bool',
+ is => 'rw',
+ isa => Bool,
);
-=head2 quite
+
+=head2 quiet
Be less verbose about actions
+
=cut
+
has quiet => (
- is => 'rw',
- isa => 'Bool',
+ is => 'rw',
+ isa => Bool,
);
has '_confirm' => (
- is => 'bare',
- isa => 'Bool',
+ is => 'bare',
+ isa => Bool,
);
+
+=head2 trace
+
+Toggle DBIx::Class debug output
+
+=cut
+
+has trace => (
+ is => 'rw',
+ isa => Bool,
+ trigger => \&_trigger_trace,
+);
+
+sub _trigger_trace {
+ my ($self, $new, $old) = @_;
+ $self->schema->storage->debug($new);
+}
+
+
=head1 METHODS
=head2 create
=back
-L<create> will generate sql for the supplied schema_class in sql_dir. The flavour of sql to
-generate can be controlled by suppling a sqlt_type which should be a L<SQL::Translator> name.
+C<create> will generate sql for the supplied schema_class in sql_dir. The
+flavour of sql to generate can be controlled by supplying a sqlt_type which
+should be a L<SQL::Translator> name.
Arguments for L<SQL::Translator> can be supplied in the sqlt_args hashref.
Optional preversion can be supplied to generate a diff to be used by upgrade.
+
=cut
sub create {
- my ($self, $sqlt_type, $sqlt_args, $preversion) = @_;
+ my ($self, $sqlt_type, $sqlt_args, $preversion) = @_;
- $preversion ||= $self->preversion();
+ $preversion ||= $self->preversion();
+ $sqlt_type ||= $self->sql_type();
- my $schema = $self->schema();
- # create the dir if does not exist
- $self->sql_dir->mkpath() if ( ! -d $self->sql_dir);
+ my $schema = $self->schema();
+ # create the dir if does not exist
+ $self->sql_dir->mkpath() if ( ! -d $self->sql_dir);
- $schema->create_ddl_dir( $sqlt_type, (defined $schema->schema_version ? $schema->schema_version : ""), $self->sql_dir->stringify, $preversion, $sqlt_args );
+ $schema->create_ddl_dir( $sqlt_type, (defined $schema->schema_version ? $schema->schema_version : ""), $self->sql_dir->stringify, $preversion, $sqlt_args );
}
+
=head2 upgrade
=over 4
upgrade will attempt to upgrade the connected database to the same version as the schema_class.
B<MAKE SURE YOU BACKUP YOUR DB FIRST>
+
=cut
sub upgrade {
- my ($self) = @_;
- my $schema = $self->schema();
- if (!$schema->get_db_version()) {
- # schema is unversioned
- die "could not determin current schema version, please either install or deploy";
- } else {
- my $ret = $schema->upgrade();
- return $ret;
- }
+ my ($self) = @_;
+ my $schema = $self->schema();
+
+ if (!$schema->get_db_version()) {
+ # schema is unversioned
+ $schema->throw_exception ("Could not determine current schema version, please either install() or deploy().\n");
+ } else {
+ $schema->upgrade_directory ($self->sql_dir) if $self->sql_dir; # this will override whatever default the schema has
+ my $ret = $schema->upgrade();
+ return $ret;
+ }
}
+
=head2 install
=over 4
=back
-install is here to help when you want to move to L<DBIx::Class::Schema::Versioned> and have an existing
-database. install will take a version and add the version tracking tables and 'install' the version. No
-further ddl modification takes place. Setting the force attribute to a true value will allow overriding of
+install is here to help when you want to move to L<DBIx::Class::Schema::Versioned> and have an existing
+database. install will take a version and add the version tracking tables and 'install' the version. No
+further ddl modification takes place. Setting the force attribute to a true value will allow overriding of
already versioned databases.
+
=cut
+
sub install {
- my ($self, $version) = @_;
-
- my $schema = $self->schema();
- $version ||= $self->version();
- if (!$schema->get_db_version() ) {
- # schema is unversioned
- print "Going to install schema version\n";
- my $ret = $schema->install($version);
- print "retun is $ret\n";
- }
- elsif ($schema->get_db_version() and $self->force ) {
- warn "forcing install may not be a good idea";
- if($self->_confirm() ) {
- # FIXME private api
- $self->schema->_set_db_version({ version => $version});
- }
- }
- else {
- die "schema already has a version not installing, try upgrade instead";
- }
+ my ($self, $version) = @_;
+
+ my $schema = $self->schema();
+ $version ||= $self->version();
+ if (!$schema->get_db_version() ) {
+ # schema is unversioned
+ print "Going to install schema version\n" if (!$self->quiet);
+ my $ret = $schema->install($version);
+ print "return is $ret\n" if (!$self->quiet);
+ }
+ elsif ($schema->get_db_version() and $self->force ) {
+ carp "Forcing install may not be a good idea";
+ if($self->_confirm() ) {
+ $self->schema->_set_db_version({ version => $version});
+ }
+ }
+ else {
+ $schema->throw_exception ("Schema already has a version. Try upgrade instead.\n");
+ }
}
+
=head2 deploy
=over 4
=back
-deploy will create the schema at the connected database. C<$args> are passed straight to
-L<DBIx::Class::Schema/deploy>.
+deploy will create the schema at the connected database. C<$args> are passed straight to
+L<DBIx::Class::Schema/deploy>.
+
=cut
+
sub deploy {
- my ($self, $args) = @_;
- my $schema = $self->schema();
- if (!$schema->get_db_version() ) {
- # schema is unversioned
- $schema->deploy( $args, $self->sql_dir)
- or die "could not deploy schema";
- } else {
- die "there already is a database with a version here, try upgrade instead";
- }
+ my ($self, $args) = @_;
+ my $schema = $self->schema();
+ $schema->deploy( $args, $self->sql_dir );
}
-
-# FIXME ensure option spec compatability
-#die('Do not use the where option with the insert op') if ($where);
-#die('Do not use the attrs option with the insert op') if ($attrs);
-
=head2 insert
=over 4
into that resultset
=cut
+
sub insert {
- my ($self, $rs, $set) = @_;
+ my ($self, $rs, $set) = @_;
- $rs ||= $self->resultset();
- $set ||= $self->set();
- my $resultset = $self->schema->resultset($rs);
- my $obj = $resultset->create( $set );
- print ''.ref($resultset).' ID: '.join(',',$obj->id())."\n" if (!$self->quiet);
+ $rs ||= $self->resultset();
+ $set ||= $self->set();
+ my $resultset = $self->schema->resultset($rs);
+ my $obj = $resultset->create( $set );
+ print ''.ref($resultset).' ID: '.join(',',$obj->id())."\n" if (!$self->quiet);
}
=head2 update
-=over 4
+=over 4
=item Arguments: $rs, $set, $where
=back
-update takes the name of a resultset from the schema_class, a hashref of data to update and
-a where hash used to form the search for the rows to update.
+update takes the name of a resultset from the schema_class, a hashref of data to update and
+a where hash used to form the search for the rows to update.
+
=cut
+
sub update {
- my ($self, $rs, $set, $where) = @_;
+ my ($self, $rs, $set, $where) = @_;
- $rs ||= $self->resultset();
- $where ||= $self->where();
- $set ||= $self->set();
- my $resultset = $self->schema->resultset($rs);
- $resultset = $resultset->search( ($where||{}) );
+ $rs ||= $self->resultset();
+ $where ||= $self->where();
+ $set ||= $self->set();
+ my $resultset = $self->schema->resultset($rs);
+ $resultset = $resultset->search( ($where||{}) );
- my $count = $resultset->count();
- print "This action will modify $count ".ref($resultset)." records.\n" if (!$self->quiet);
+ my $count = $resultset->count();
+ print "This action will modify $count ".ref($resultset)." records.\n" if (!$self->quiet);
- if ( $self->force || $self->_confirm() ) {
- $resultset->update_all( $set );
- }
+ if ( $self->force || $self->_confirm() ) {
+ $resultset->update_all( $set );
+ }
}
-# FIXME
-#die('Do not use the set option with the delete op') if ($set);
+
=head2 delete
=over 4
=back
-delete takes the name of a resultset from the schema_class, a where hashref and a attrs to pass to ->search.
+delete takes the name of a resultset from the schema_class, a where hashref and a attrs to pass to ->search.
The found data is deleted and cannot be recovered.
+
=cut
+
sub delete {
- my ($self, $rs, $where, $attrs) = @_;
+ my ($self, $rs, $where, $attrs) = @_;
- $rs ||= $self->resultset();
- $where ||= $self->where();
- $attrs ||= $self->attrs();
- my $resultset = $self->schema->resultset($rs);
- $resultset = $resultset->search( ($where||{}), ($attrs||()) );
+ $rs ||= $self->resultset();
+ $where ||= $self->where();
+ $attrs ||= $self->attrs();
+ my $resultset = $self->schema->resultset($rs);
+ $resultset = $resultset->search( ($where||{}), ($attrs||()) );
- my $count = $resultset->count();
- print "This action will delete $count ".ref($resultset)." records.\n" if (!$self->quiet);
+ my $count = $resultset->count();
+ print "This action will delete $count ".ref($resultset)." records.\n" if (!$self->quiet);
- if ( $self->force || $self->_confirm() ) {
- $resultset->delete_all();
- }
+ if ( $self->force || $self->_confirm() ) {
+ $resultset->delete_all();
+ }
}
+
=head2 select
=over 4
=back
-select takes the name of a resultset from the schema_class, a where hashref and a attrs to pass to ->search.
+select takes the name of a resultset from the schema_class, a where hashref and a attrs to pass to ->search.
The found data is returned in a array ref where the first row will be the columns list.
=cut
+
sub select {
- my ($self, $rs, $where, $attrs) = @_;
-
- $rs ||= $self->resultset();
- $where ||= $self->where();
- $attrs ||= $self->attrs();
- my $resultset = $self->schema->resultset($rs);
- $resultset = $resultset->search( ($where||{}), ($attrs||()) );
-
- my @data;
- my @columns = $resultset->result_source->columns();
- push @data, [@columns];#
-
- while (my $row = $resultset->next()) {
- my @fields;
- foreach my $column (@columns) {
- push( @fields, $row->get_column($column) );
- }
- push @data, [@fields];
- }
-
- return \@data;
+ my ($self, $rs, $where, $attrs) = @_;
+
+ $rs ||= $self->resultset();
+ $where ||= $self->where();
+ $attrs ||= $self->attrs();
+ my $resultset = $self->schema->resultset($rs);
+ $resultset = $resultset->search( ($where||{}), ($attrs||()) );
+
+ my @data;
+ my @columns = $resultset->result_source->columns();
+ push @data, [@columns];
+
+ while (my $row = $resultset->next()) {
+ my @fields;
+ foreach my $column (@columns) {
+ push( @fields, $row->get_column($column) );
+ }
+ push @data, [@fields];
+ }
+
+ return \@data;
}
sub _confirm {
- my ($self) = @_;
- print "Are you sure you want to do this? (type YES to confirm) \n";
- # mainly here for testing
- return 1 if ($self->meta->get_attribute('_confirm')->get_value($self));
- my $response = <STDIN>;
- return 1 if ($response=~/^YES/);
- return;
-}
+ my ($self) = @_;
-sub _find_stanza {
- my ($self, $cfg, $stanza) = @_;
- my @path = split /::/, $stanza;
- while (my $path = shift @path) {
- if (exists $cfg->{$path}) {
- $cfg = $cfg->{$path};
- }
- else {
- die "could not find $stanza in config, $path did not seem to exist";
- }
- }
- return $cfg;
+ # mainly here for testing
+ return 1 if ($self->meta->get_attribute('_confirm')->get_value($self));
+
+ print "Are you sure you want to do this? (type YES to confirm) \n";
+ my $response = <STDIN>;
+
+ return ($response=~/^YES/);
}
-sub _json_to_data {
- my ($json_str) = @_;
- my $json = JSON::Any->new(allow_barekey => 1, allow_singlequote => 1, relaxed=>1);
- my $ret = $json->jsonToObj($json_str);
- return $ret;
+sub _find_stanza {
+ my ($self, $cfg, $stanza) = @_;
+ my @path = split /::/, $stanza;
+ while (my $path = shift @path) {
+ if (exists $cfg->{$path}) {
+ $cfg = $cfg->{$path};
+ }
+ else {
+ die ("Could not find $stanza in config, $path does not seem to exist.\n");
+ }
+ }
+ return $cfg;
}
=head1 AUTHOR
-Gordon Irving <goraxe@cpan.org>
-
-with code taken from dbicadmin by
-Aran Deltac <bluefeet@cpan.org>
-
+See L<DBIx::Class/CONTRIBUTORS>.
=head1 LICENSE
-You may distribute this code under the same terms as Perl itself.
+You may distribute this code under the same terms as Perl itself.
+
=cut
+
1;